repo_id stringclasses 875
values | size int64 974 38.9k | file_path stringlengths 10 308 | content stringlengths 974 38.9k |
|---|---|---|---|
apache/ozone | 37,275 | hadoop-ozone/recon/src/test/java/org/apache/hadoop/ozone/recon/fsck/TestContainerHealthTask.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.recon.fsck;
import static org.apache.hadoop.hdds.protocol.proto.HddsProtos.ReplicationFactor.THREE;
import static org.apache.ozone.recon.schema.ContainerSchemaDefinition.UnHealthyContainerStates.ALL_REPLICAS_BAD;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.ArgumentMatchers.anyInt;
import static org.mockito.ArgumentMatchers.anyString;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.times;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import java.io.IOException;
import java.lang.reflect.Field;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.UUID;
import org.apache.hadoop.hdds.client.RatisReplicationConfig;
import org.apache.hadoop.hdds.client.ReplicatedReplicationConfig;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.DatanodeDetails;
import org.apache.hadoop.hdds.protocol.MockDatanodeDetails;
import org.apache.hadoop.hdds.protocol.proto.HddsProtos;
import org.apache.hadoop.hdds.protocol.proto.StorageContainerDatanodeProtocolProtos.ContainerReplicaProto.State;
import org.apache.hadoop.hdds.scm.ContainerPlacementStatus;
import org.apache.hadoop.hdds.scm.PlacementPolicy;
import org.apache.hadoop.hdds.scm.container.ContainerID;
import org.apache.hadoop.hdds.scm.container.ContainerInfo;
import org.apache.hadoop.hdds.scm.container.ContainerManager;
import org.apache.hadoop.hdds.scm.container.ContainerReplica;
import org.apache.hadoop.hdds.scm.container.TestContainerInfo;
import org.apache.hadoop.hdds.scm.container.common.helpers.ContainerWithPipeline;
import org.apache.hadoop.hdds.scm.container.placement.algorithms.ContainerPlacementStatusDefault;
import org.apache.hadoop.ozone.recon.metrics.ContainerHealthMetrics;
import org.apache.hadoop.ozone.recon.persistence.AbstractReconSqlDBTest;
import org.apache.hadoop.ozone.recon.persistence.ContainerHealthSchemaManager;
import org.apache.hadoop.ozone.recon.scm.ReconStorageContainerManagerFacade;
import org.apache.hadoop.ozone.recon.spi.ReconContainerMetadataManager;
import org.apache.hadoop.ozone.recon.spi.StorageContainerServiceProvider;
import org.apache.hadoop.ozone.recon.tasks.ReconTaskConfig;
import org.apache.hadoop.ozone.recon.tasks.updater.ReconTaskStatusUpdater;
import org.apache.hadoop.ozone.recon.tasks.updater.ReconTaskStatusUpdaterManager;
import org.apache.ozone.recon.schema.ContainerSchemaDefinition;
import org.apache.ozone.recon.schema.generated.tables.daos.ReconTaskStatusDao;
import org.apache.ozone.recon.schema.generated.tables.daos.UnhealthyContainersDao;
import org.apache.ozone.recon.schema.generated.tables.pojos.ReconTaskStatus;
import org.apache.ozone.recon.schema.generated.tables.pojos.UnhealthyContainers;
import org.apache.ozone.test.LambdaTestUtils;
import org.junit.jupiter.api.Test;
/**
* Class to test a single run of the Container Health Task.
*/
public class TestContainerHealthTask extends AbstractReconSqlDBTest {
public TestContainerHealthTask() {
  // Delegates to AbstractReconSqlDBTest, which provisions the embedded
  // Recon SQL schema and DAOs used by every test in this class.
  super();
}
/**
 * Full run of the ContainerHealthTask against 8 mocked containers covering
 * UNDER_REPLICATED, EMPTY_MISSING, OVER_REPLICATED, MIS_REPLICATED, healthy,
 * MISSING and REPLICA_MISMATCH states. The mocks are then mutated and a
 * second task cycle is awaited to verify that the UNHEALTHY_CONTAINERS table
 * and the container health metrics are updated/cleaned up accordingly.
 */
@SuppressWarnings("checkstyle:methodlength")
@Test
public void testRun() throws Exception {
  UnhealthyContainersDao unHealthyContainersTableHandle =
      getDao(UnhealthyContainersDao.class);
  ContainerHealthSchemaManager containerHealthSchemaManager =
      new ContainerHealthSchemaManager(
          getSchemaDefinition(ContainerSchemaDefinition.class),
          unHealthyContainersTableHandle);
  ReconStorageContainerManagerFacade scmMock =
      mock(ReconStorageContainerManagerFacade.class);
  ReconContainerMetadataManager reconContainerMetadataManager =
      mock(ReconContainerMetadataManager.class);
  MockPlacementPolicy placementMock = new MockPlacementPolicy();
  ContainerManager containerManagerMock = mock(ContainerManager.class);
  StorageContainerServiceProvider scmClientMock =
      mock(StorageContainerServiceProvider.class);
  ContainerReplica unhealthyReplicaMock = mock(ContainerReplica.class);
  when(unhealthyReplicaMock.getState()).thenReturn(State.UNHEALTHY);
  ContainerReplica healthyReplicaMock = mock(ContainerReplica.class);
  when(healthyReplicaMock.getState()).thenReturn(State.CLOSED);
  // Create 8 containers. IDs 1-5 get the various unhealthy states stubbed
  // below, ID=6 is healthy, ID=7 is MISSING (and later transitions to
  // EMPTY_MISSING, which is never inserted into the DB), and ID=8 has a
  // replica data-checksum mismatch.
  List<ContainerInfo> mockContainers = getMockContainers(8);
  when(scmMock.getScmServiceProvider()).thenReturn(scmClientMock);
  when(scmMock.getContainerManager()).thenReturn(containerManagerMock);
  when(containerManagerMock.getContainers(any(ContainerID.class),
      anyInt())).thenReturn(mockContainers);
  for (ContainerInfo c : mockContainers) {
    when(containerManagerMock.getContainer(c.containerID())).thenReturn(c);
    when(scmClientMock.getContainerWithPipeline(c.getContainerID()))
        .thenReturn(new ContainerWithPipeline(c, null));
  }
  ReplicatedReplicationConfig replicationConfig = RatisReplicationConfig.getInstance(THREE);
  // Container 1: one CLOSED + one UNHEALTHY replica -> UNDER_REPLICATED
  ContainerInfo containerInfo1 =
      TestContainerInfo.newBuilderForTest().setContainerID(1).setReplicationConfig(replicationConfig).build();
  when(containerManagerMock.getContainer(ContainerID.valueOf(1L))).thenReturn(containerInfo1);
  when(containerManagerMock.getContainerReplicas(containerInfo1.containerID()))
      .thenReturn(getMockReplicas(1L, State.CLOSED, State.UNHEALTHY));
  // return all UNHEALTHY replicas for container ID 2 -> UNDER_REPLICATED
  ContainerInfo containerInfo2 =
      TestContainerInfo.newBuilderForTest().setContainerID(2).setReplicationConfig(replicationConfig).build();
  when(containerManagerMock.getContainer(ContainerID.valueOf(2L))).thenReturn(containerInfo2);
  when(containerManagerMock.getContainerReplicas(containerInfo2.containerID()))
      .thenReturn(getMockReplicas(2L, State.UNHEALTHY));
  // return 0 replicas for container ID 3 -> EMPTY_MISSING (will not be inserted into DB)
  ContainerInfo containerInfo3 =
      TestContainerInfo.newBuilderForTest().setContainerID(3).setReplicationConfig(replicationConfig).build();
  when(containerManagerMock.getContainer(ContainerID.valueOf(3L))).thenReturn(containerInfo3);
  when(containerManagerMock.getContainerReplicas(containerInfo3.containerID()))
      .thenReturn(Collections.emptySet());
  // Return 5 Healthy Replicas -> Over-replicated
  ContainerInfo containerInfo4 =
      TestContainerInfo.newBuilderForTest().setContainerID(4).setReplicationConfig(replicationConfig).build();
  when(containerManagerMock.getContainer(ContainerID.valueOf(4L))).thenReturn(containerInfo4);
  when(containerManagerMock.getContainerReplicas(containerInfo4.containerID()))
      .thenReturn(getMockReplicas(4L, State.CLOSED, State.CLOSED,
          State.CLOSED, State.CLOSED, State.CLOSED));
  // Mis-replicated: the placement policy is told to report an invalid
  // placement whenever the first replica's datanode UUID is seen.
  ContainerInfo containerInfo5 =
      TestContainerInfo.newBuilderForTest().setContainerID(5).setReplicationConfig(replicationConfig).build();
  when(containerManagerMock.getContainer(ContainerID.valueOf(5L))).thenReturn(containerInfo5);
  Set<ContainerReplica> misReplicas = getMockReplicas(5L,
      State.CLOSED, State.CLOSED, State.CLOSED);
  placementMock.setMisRepWhenDnPresent(
      misReplicas.iterator().next().getDatanodeDetails().getUuid());
  when(containerManagerMock.getContainerReplicas(containerInfo5.containerID()))
      .thenReturn(misReplicas);
  // Return 3 Healthy Replicas -> Healthy container
  ContainerInfo containerInfo6 =
      TestContainerInfo.newBuilderForTest().setContainerID(6).setReplicationConfig(replicationConfig).build();
  when(containerManagerMock.getContainer(ContainerID.valueOf(6L))).thenReturn(containerInfo6);
  when(containerManagerMock.getContainerReplicas(containerInfo6.containerID()))
      .thenReturn(getMockReplicas(6L,
          State.CLOSED, State.CLOSED, State.CLOSED));
  // return 0 replicas for container ID 7 -> MISSING (will later transition to EMPTY_MISSING but not inserted into DB)
  ContainerInfo containerInfo7 =
      TestContainerInfo.newBuilderForTest().setContainerID(7).setReplicationConfig(replicationConfig).build();
  when(containerManagerMock.getContainer(ContainerID.valueOf(7L))).thenReturn(containerInfo7);
  when(containerManagerMock.getContainerReplicas(containerInfo7.containerID()))
      .thenReturn(Collections.emptySet());
  // Non-zero key count means container 7 is NOT empty, so it is classified
  // MISSING rather than EMPTY_MISSING for the first run.
  when(reconContainerMetadataManager.getKeyCountForContainer(
      7L)).thenReturn(5L); // Indicates non-empty container 7 for now
  // container ID 8 - REPLICA_MISMATCH (replicas disagree on data checksum)
  ContainerInfo containerInfo8 =
      TestContainerInfo.newBuilderForTest().setContainerID(8).setReplicationConfig(replicationConfig).build();
  when(containerManagerMock.getContainer(ContainerID.valueOf(8L))).thenReturn(containerInfo8);
  Set<ContainerReplica> mismatchReplicas = getMockReplicasChecksumMismatch(8L,
      State.CLOSED, State.CLOSED, State.CLOSED);
  when(containerManagerMock.getContainerReplicas(containerInfo8.containerID()))
      .thenReturn(mismatchReplicas);
  // Table must start empty before the task runs.
  List<UnhealthyContainers> all = unHealthyContainersTableHandle.findAll();
  assertThat(all).isEmpty();
  long currentTime = System.currentTimeMillis();
  ReconTaskStatusDao reconTaskStatusDao = getDao(ReconTaskStatusDao.class);
  ReconTaskConfig reconTaskConfig = new ReconTaskConfig();
  reconTaskConfig.setMissingContainerTaskInterval(Duration.ofSeconds(10));
  // Start container health task
  ContainerHealthTask containerHealthTask =
      new ContainerHealthTask(scmMock.getContainerManager(),
          scmMock.getScmServiceProvider(), containerHealthSchemaManager,
          placementMock, reconTaskConfig, reconContainerMetadataManager,
          new OzoneConfiguration(), getMockTaskStatusUpdaterManager());
  containerHealthTask.start();
  // Expect 6 rows: containers 1, 2, 4, 5, 7, 8 (3 is EMPTY_MISSING and
  // therefore not persisted; 6 is healthy).
  LambdaTestUtils.await(60000, 1000, () ->
      (unHealthyContainersTableHandle.count() == 6));
  // Check for UNDER_REPLICATED container states
  UnhealthyContainers rec =
      unHealthyContainersTableHandle.fetchByContainerId(1L).get(0);
  assertEquals("UNDER_REPLICATED", rec.getContainerState());
  assertEquals(2, rec.getReplicaDelta().intValue());
  rec = unHealthyContainersTableHandle.fetchByContainerId(2L).get(0);
  assertEquals("UNDER_REPLICATED", rec.getContainerState());
  assertEquals(3, rec.getReplicaDelta().intValue());
  // Assert that EMPTY_MISSING state containers were never added to DB.
  assertEquals(0,
      unHealthyContainersTableHandle.fetchByContainerId(3L).size());
  // Container 2 (all replicas UNHEALTHY) is also tracked as ALL_REPLICAS_BAD.
  List<UnhealthyContainers> unhealthyContainers =
      containerHealthSchemaManager.getUnhealthyContainers(
          ALL_REPLICAS_BAD, 0L, Optional.empty(), Integer.MAX_VALUE);
  assertEquals(1, unhealthyContainers.size());
  assertEquals(2L,
      unhealthyContainers.get(0).getContainerId().longValue());
  assertEquals(0,
      unhealthyContainers.get(0).getActualReplicaCount().intValue());
  // Check for MISSING state in container ID 7
  rec = unHealthyContainersTableHandle.fetchByContainerId(7L).get(0);
  assertEquals("MISSING", rec.getContainerState());
  assertEquals(3, rec.getReplicaDelta().intValue());
  // Read the task's private metrics object via reflection, since there is no
  // public accessor for it.
  Field field = ContainerHealthTask.class.getDeclaredField("containerHealthMetrics");
  field.setAccessible(true);
  // Read private field value
  ContainerHealthMetrics containerHealthMetrics = (ContainerHealthMetrics) field.get(containerHealthTask);
  // Only Container ID: 7 is MISSING, so count of missing container count metrics should be equal to 1
  assertEquals(1, containerHealthMetrics.getMissingContainerCount());
  // Container ID: 1 and Container ID: 2, both are UNDER_REPLICATED, so UNDER_REPLICATED
  // container count metric should be 2
  assertEquals(2, containerHealthMetrics.getUnderReplicatedContainerCount());
  rec = unHealthyContainersTableHandle.fetchByContainerId(4L).get(0);
  assertEquals("OVER_REPLICATED", rec.getContainerState());
  assertEquals(-2, rec.getReplicaDelta().intValue());
  rec = unHealthyContainersTableHandle.fetchByContainerId(5L).get(0);
  assertEquals("MIS_REPLICATED", rec.getContainerState());
  assertEquals(1, rec.getReplicaDelta().intValue());
  assertEquals(2, rec.getExpectedReplicaCount().intValue());
  assertEquals(1, rec.getActualReplicaCount().intValue());
  assertNotNull(rec.getReason());
  rec = unHealthyContainersTableHandle.fetchByContainerId(8L).get(0);
  assertEquals("REPLICA_MISMATCH", rec.getContainerState());
  assertEquals(0, rec.getReplicaDelta().intValue());
  assertEquals(3, rec.getExpectedReplicaCount().intValue());
  assertEquals(3, rec.getActualReplicaCount().intValue());
  // Task bookkeeping: the status row must have been touched since start.
  ReconTaskStatus taskStatus =
      reconTaskStatusDao.findById(containerHealthTask.getTaskName());
  assertThat(taskStatus.getLastUpdatedTimestamp())
      .isGreaterThan(currentTime);
  // Adjust the mock results and rerun to check for updates or removal of records
  when(containerManagerMock.getContainerReplicas(ContainerID.valueOf(1L)))
      .thenReturn(getMockReplicas(1L, State.CLOSED, State.CLOSED));
  // ID 2 was UNDER_REPLICATED - make it healthy now and after this step, UNDER_REPLICATED
  // container count metric will be 1.
  when(containerManagerMock.getContainerReplicas(ContainerID.valueOf(2L)))
      .thenReturn(getMockReplicas(2L,
          State.CLOSED, State.CLOSED, State.CLOSED));
  // Container 3 remains EMPTY_MISSING, but no DB insertion
  when(containerManagerMock.getContainerReplicas(ContainerID.valueOf(3L)))
      .thenReturn(Collections.emptySet());
  // Return 4 Healthy -> Delta changes from -2 to -1
  when(containerManagerMock.getContainerReplicas(ContainerID.valueOf(4L)))
      .thenReturn(getMockReplicas(4L, State.CLOSED, State.CLOSED,
          State.CLOSED, State.CLOSED));
  // Convert container 7 which was MISSING to EMPTY_MISSING (not inserted into DB)
  when(reconContainerMetadataManager.getKeyCountForContainer(
      7L)).thenReturn(0L);
  // Clearing the mis-replication trigger makes container 5 healthy again.
  placementMock.setMisRepWhenDnPresent(null);
  // Ensure count is reduced after EMPTY_MISSING containers are not inserted
  LambdaTestUtils.await(60000, 1000, () ->
      (unHealthyContainersTableHandle.count() == 3));
  rec = unHealthyContainersTableHandle.fetchByContainerId(1L).get(0);
  assertEquals("UNDER_REPLICATED", rec.getContainerState());
  assertEquals(1, rec.getReplicaDelta().intValue());
  // This container is now healthy, it should not be in the table any more
  assertEquals(0,
      unHealthyContainersTableHandle.fetchByContainerId(2L).size());
  // Now since container ID: 2 is gone back to HEALTHY state in above step, so UNDER-REPLICATED
  // container count should be just 1 (denoting only for container ID : 1)
  assertEquals(1, containerHealthMetrics.getUnderReplicatedContainerCount());
  // Assert that for container 7 no records exist in DB because it's now EMPTY_MISSING
  assertEquals(0,
      unHealthyContainersTableHandle.fetchByContainerId(7L).size());
  // Since Container ID: 7 is now EMPTY_MISSING, so MISSING container count metric
  // will now be 0 as there is no missing container now.
  assertEquals(0, containerHealthMetrics.getMissingContainerCount());
  rec = unHealthyContainersTableHandle.fetchByContainerId(4L).get(0);
  assertEquals("OVER_REPLICATED", rec.getContainerState());
  assertEquals(-1, rec.getReplicaDelta().intValue());
  // Ensure container 5 is now healthy and not in the table
  assertEquals(0,
      unHealthyContainersTableHandle.fetchByContainerId(5L).size());
  // Just check once again that count remains consistent
  LambdaTestUtils.await(60000, 1000, () ->
      (unHealthyContainersTableHandle.count() == 3));
  // Since other container states have been changing, but no change in UNDER_REPLICATED
  // container count, UNDER_REPLICATED count metric should not be affected from previous
  // assertion count.
  assertEquals(1, containerHealthMetrics.getUnderReplicatedContainerCount());
  assertEquals(0, containerHealthMetrics.getMissingContainerCount());
  containerHealthTask.stop();
}
/**
 * Verifies that a container reported DELETED by SCM is not flagged as
 * unhealthy in Recon: of three containers with no replicas, only the OPEN,
 * non-empty container (ID 1) should end up recorded as MISSING.
 */
@Test
public void testDeletedContainer() throws Exception {
  UnhealthyContainersDao unHealthyContainersTableHandle =
      getDao(UnhealthyContainersDao.class);
  ContainerHealthSchemaManager containerHealthSchemaManager =
      new ContainerHealthSchemaManager(
          getSchemaDefinition(ContainerSchemaDefinition.class),
          unHealthyContainersTableHandle);
  ReconStorageContainerManagerFacade scmMock =
      mock(ReconStorageContainerManagerFacade.class);
  MockPlacementPolicy placementMock = new MockPlacementPolicy();
  ContainerManager containerManagerMock = mock(ContainerManager.class);
  StorageContainerServiceProvider scmClientMock =
      mock(StorageContainerServiceProvider.class);
  ReconContainerMetadataManager reconContainerMetadataManager =
      mock(ReconContainerMetadataManager.class);
  // Create 3 containers, all with no replicas: IDs 1 and 3 are OPEN in
  // Recon, ID 2 is CLOSED in Recon and reported DELETED by SCM.
  List<ContainerInfo> mockContainers = getMockContainers(3);
  when(scmMock.getScmServiceProvider()).thenReturn(scmClientMock);
  when(scmMock.getContainerManager()).thenReturn(containerManagerMock);
  when(containerManagerMock.getContainers(any(ContainerID.class),
      anyInt())).thenReturn(mockContainers);
  for (ContainerInfo c : mockContainers) {
    when(containerManagerMock.getContainer(c.containerID())).thenReturn(c);
    when(scmClientMock.getContainerWithPipeline(c.getContainerID()))
        .thenReturn(new ContainerWithPipeline(c, null));
  }
  // Empty Container with OPEN State and no replicas.
  // getContainer(...) already returns a mock (stubbed in the loop above),
  // so chaining .getState() here stubs that mock's state.
  when(containerManagerMock.getContainer(ContainerID.valueOf(1L)).getState())
      .thenReturn(HddsProtos.LifeCycleState.OPEN);
  when(containerManagerMock.getContainerReplicas(ContainerID.valueOf(1L)))
      .thenReturn(Collections.emptySet());
  when(scmClientMock.getContainerWithPipeline(1))
      .thenReturn(new ContainerWithPipeline(mockContainers.get(0), null));
  // Container State CLOSED with no replicas
  when(containerManagerMock.getContainer(ContainerID.valueOf(2L)).getState())
      .thenReturn(HddsProtos.LifeCycleState.CLOSED);
  when(containerManagerMock.getContainerReplicas(ContainerID.valueOf(2L)))
      .thenReturn(Collections.emptySet());
  // SCM reports container 2 as DELETED, so it must not be flagged MISSING.
  ContainerInfo mockDeletedContainer = getMockDeletedContainer(2);
  when(scmClientMock.getContainerWithPipeline(2))
      .thenReturn(new ContainerWithPipeline(mockDeletedContainer, null));
  // Container with OPEN State and no replicas
  when(containerManagerMock.getContainer(ContainerID.valueOf(3L)).getState())
      .thenReturn(HddsProtos.LifeCycleState.OPEN);
  when(containerManagerMock.getContainerReplicas(ContainerID.valueOf(3L)))
      .thenReturn(Collections.emptySet());
  when(scmClientMock.getContainerWithPipeline(3))
      .thenReturn(new ContainerWithPipeline(mockContainers.get(0), null));
  // Table must start empty before the task runs.
  List<UnhealthyContainers> all = unHealthyContainersTableHandle.findAll();
  assertThat(all).isEmpty();
  long currentTime = System.currentTimeMillis();
  ReconTaskStatusDao reconTaskStatusDao = getDao(ReconTaskStatusDao.class);
  ReconTaskConfig reconTaskConfig = new ReconTaskConfig();
  reconTaskConfig.setMissingContainerTaskInterval(Duration.ofSeconds(2));
  // Non-zero key count -> container 1 is non-empty, so it counts as MISSING.
  when(reconContainerMetadataManager.getKeyCountForContainer(
      1L)).thenReturn(5L);
  ContainerHealthTask containerHealthTask =
      new ContainerHealthTask(scmMock.getContainerManager(),
          scmMock.getScmServiceProvider(), containerHealthSchemaManager,
          placementMock, reconTaskConfig, reconContainerMetadataManager,
          new OzoneConfiguration(), getMockTaskStatusUpdaterManager());
  containerHealthTask.start();
  // Only container 1 should be persisted (2 is DELETED in SCM, 3 is OPEN).
  LambdaTestUtils.await(6000, 1000, () ->
      (unHealthyContainersTableHandle.count() == 1));
  UnhealthyContainers rec =
      unHealthyContainersTableHandle.fetchByContainerId(1L).get(0);
  assertEquals("MISSING", rec.getContainerState());
  assertEquals(3, rec.getReplicaDelta().intValue());
  // Task bookkeeping: the status row must have been touched since start.
  ReconTaskStatus taskStatus =
      reconTaskStatusDao.findById(containerHealthTask.getTaskName());
  assertThat(taskStatus.getLastUpdatedTimestamp())
      .isGreaterThan(currentTime);
}
/**
 * Verifies that a record for every {@code UnHealthyContainerStates} enum
 * value can be inserted into the UNHEALTHY_CONTAINERS table without error,
 * and that the persisted state matches what was written.
 */
@Test
public void testAllContainerStateInsertions() {
  UnhealthyContainersDao unHealthyContainersTableHandle =
      getDao(UnhealthyContainersDao.class);
  ContainerHealthSchemaManager containerHealthSchemaManager =
      new ContainerHealthSchemaManager(
          getSchemaDefinition(ContainerSchemaDefinition.class),
          unHealthyContainersTableHandle);
  // Iterate through each state in the UnHealthyContainerStates enum
  for (ContainerSchemaDefinition.UnHealthyContainerStates state :
      ContainerSchemaDefinition.UnHealthyContainerStates.values()) {
    // Create a dummy UnhealthyContainer record with the current state.
    // ordinal()+1 gives each state a unique container id.
    UnhealthyContainers unhealthyContainer = new UnhealthyContainers();
    unhealthyContainer.setContainerId(state.ordinal() + 1L);
    // Set replica counts based on the state. MISSING, EMPTY_MISSING and
    // ALL_REPLICAS_BAD all model "no usable replicas" (3/0/+3), so they
    // share one arm.
    switch (state) {
    case MISSING:
    case EMPTY_MISSING:
    case ALL_REPLICAS_BAD:
      unhealthyContainer.setExpectedReplicaCount(3);
      unhealthyContainer.setActualReplicaCount(0);
      unhealthyContainer.setReplicaDelta(3);
      break;
    case UNDER_REPLICATED:
      unhealthyContainer.setExpectedReplicaCount(3);
      unhealthyContainer.setActualReplicaCount(1);
      unhealthyContainer.setReplicaDelta(2);
      break;
    case OVER_REPLICATED:
      unhealthyContainer.setExpectedReplicaCount(3);
      unhealthyContainer.setActualReplicaCount(4);
      unhealthyContainer.setReplicaDelta(-1);
      break;
    case MIS_REPLICATED:
    case NEGATIVE_SIZE:
    case REPLICA_MISMATCH:
      unhealthyContainer.setExpectedReplicaCount(3);
      unhealthyContainer.setActualReplicaCount(3);
      unhealthyContainer.setReplicaDelta(0);
      break;
    default:
      // Force a failure when a new enum value is added without a mapping.
      fail("Unhandled state: " + state.name() + ". Please add this state to the switch case.");
    }
    unhealthyContainer.setContainerState(state.name());
    unhealthyContainer.setInStateSince(System.currentTimeMillis());
    // Try inserting the record and catch any exception that occurs
    Exception exception = null;
    try {
      containerHealthSchemaManager.insertUnhealthyContainerRecords(
          Collections.singletonList(unhealthyContainer));
    } catch (Exception e) {
      exception = e;
    }
    // Assert no exception should be thrown for each state
    assertNull(exception,
        "Exception was thrown during insertion for state " + state.name() +
            ": " + exception);
    // Verify the record was inserted correctly
    List<UnhealthyContainers> insertedRecords =
        unHealthyContainersTableHandle.fetchByContainerId(
            state.ordinal() + 1L);
    assertFalse(insertedRecords.isEmpty(),
        "Record was not inserted for state " + state.name() + ".");
    // JUnit's assertEquals takes (expected, actual) in that order; keeping
    // it correct avoids inverted failure messages.
    assertEquals(state.name(), insertedRecords.get(0).getContainerState(),
        "The inserted container state does not match for state " +
            state.name() + ".");
  }
}
/**
 * Inserts a MISSING record, then re-inserts the same container id with a
 * newer inStateSince value and verifies the existing row is updated (upsert
 * behavior) rather than failing with a duplicate-key error.
 */
@Test
public void testInsertFailureAndUpdateBehavior() {
  UnhealthyContainersDao unHealthyContainersTableHandle =
      getDao(UnhealthyContainersDao.class);
  ContainerHealthSchemaManager containerHealthSchemaManager =
      new ContainerHealthSchemaManager(
          getSchemaDefinition(ContainerSchemaDefinition.class),
          unHealthyContainersTableHandle);
  ContainerSchemaDefinition.UnHealthyContainerStates state =
      ContainerSchemaDefinition.UnHealthyContainerStates.MISSING;
  long insertedTime = System.currentTimeMillis();
  // Create a dummy UnhealthyContainer record with the current state
  UnhealthyContainers unhealthyContainer = new UnhealthyContainers();
  unhealthyContainer.setContainerId(state.ordinal() + 1L);
  unhealthyContainer.setExpectedReplicaCount(3);
  unhealthyContainer.setActualReplicaCount(0);
  unhealthyContainer.setReplicaDelta(3);
  unhealthyContainer.setContainerState(state.name());
  unhealthyContainer.setInStateSince(insertedTime);
  // Initial insert must succeed.
  Exception exception = null;
  try {
    containerHealthSchemaManager.insertUnhealthyContainerRecords(
        Collections.singletonList(unhealthyContainer));
  } catch (Exception e) {
    exception = e;
  }
  assertNull(exception,
      "Exception was thrown during insertion for state " + state.name() +
          ": " + exception);
  // Re-insert the same container id with a newer timestamp; this should
  // update the existing row, not throw.
  long updatedTime = System.currentTimeMillis();
  unhealthyContainer.setExpectedReplicaCount(3);
  unhealthyContainer.setActualReplicaCount(0);
  unhealthyContainer.setReplicaDelta(3);
  unhealthyContainer.setContainerState(state.name());
  unhealthyContainer.setInStateSince(updatedTime);
  exception = null;
  try {
    containerHealthSchemaManager.insertUnhealthyContainerRecords(
        Collections.singletonList(unhealthyContainer));
  } catch (Exception e) {
    exception = e;
  }
  // The original test captured this exception but never asserted on it,
  // silently swallowing update failures.
  assertNull(exception,
      "Exception was thrown during update for state " + state.name() +
          ": " + exception);
  // Verify the record was updated correctly (expected, actual order).
  List<UnhealthyContainers> updatedRecords =
      unHealthyContainersTableHandle.fetchByContainerId(
          state.ordinal() + 1L);
  assertFalse(updatedRecords.isEmpty(),
      "Record was not updated for state " + state.name() + ".");
  assertEquals(state.name(), updatedRecords.get(0).getContainerState(),
      "The inserted container state does not match for state " +
          state.name() + ".");
  assertEquals(updatedTime, updatedRecords.get(0).getInStateSince().longValue());
}
/**
 * Verifies that containers recorded as MISSING in Recon but reported DELETED
 * by SCM are transitioned to DELETED in Recon's container manager: the task
 * must fire a DELETE lifecycle event for both containers, regardless of
 * whether their key count marks them empty or not.
 */
@Test
public void testMissingAndEmptyMissingContainerDeletion() throws Exception {
  // Setup mock DAOs and managers
  UnhealthyContainersDao unHealthyContainersTableHandle =
      getDao(UnhealthyContainersDao.class);
  ContainerHealthSchemaManager containerHealthSchemaManager =
      new ContainerHealthSchemaManager(
          getSchemaDefinition(ContainerSchemaDefinition.class),
          unHealthyContainersTableHandle);
  ReconStorageContainerManagerFacade scmMock =
      mock(ReconStorageContainerManagerFacade.class);
  MockPlacementPolicy placementMock = new MockPlacementPolicy();
  ContainerManager containerManagerMock = mock(ContainerManager.class);
  StorageContainerServiceProvider scmClientMock =
      mock(StorageContainerServiceProvider.class);
  ReconContainerMetadataManager reconContainerMetadataManager =
      mock(ReconContainerMetadataManager.class);
  // (A stray duplicate mock(ReconContainerMetadataManager.class) call whose
  // result was discarded has been removed here.)
  // Create 2 containers. They start in CLOSED state in Recon.
  List<ContainerInfo> mockContainers = getMockContainers(2);
  when(scmMock.getScmServiceProvider()).thenReturn(scmClientMock);
  when(scmMock.getContainerManager()).thenReturn(containerManagerMock);
  when(containerManagerMock.getContainers(any(ContainerID.class),
      anyInt())).thenReturn(mockContainers);
  // Mark both containers as initially CLOSED in Recon
  for (ContainerInfo c : mockContainers) {
    when(containerManagerMock.getContainer(c.containerID())).thenReturn(c);
  }
  // Simulate SCM reporting the containers as DELETED
  ContainerInfo deletedContainer1 = getMockDeletedContainer(1);
  ContainerInfo deletedContainer2 = getMockDeletedContainer(2);
  when(scmClientMock.getContainerWithPipeline(1))
      .thenReturn(new ContainerWithPipeline(deletedContainer1, null));
  when(scmClientMock.getContainerWithPipeline(2))
      .thenReturn(new ContainerWithPipeline(deletedContainer2, null));
  // Both containers start as CLOSED in Recon (MISSING or EMPTY_MISSING)
  when(containerManagerMock.getContainer(ContainerID.valueOf(1L)).getState())
      .thenReturn(HddsProtos.LifeCycleState.CLOSED);
  when(containerManagerMock.getContainer(ContainerID.valueOf(2L)).getState())
      .thenReturn(HddsProtos.LifeCycleState.CLOSED);
  // Replicas are empty, so both containers should be considered for deletion
  when(containerManagerMock.getContainerReplicas(ContainerID.valueOf(1L)))
      .thenReturn(Collections.emptySet());
  when(containerManagerMock.getContainerReplicas(ContainerID.valueOf(2L)))
      .thenReturn(Collections.emptySet());
  // Seed the UNHEALTHY_CONTAINERS table: both rows are persisted as MISSING.
  // Container 2's key count is stubbed to 0 below, so the task will treat
  // it as the empty-missing case (EMPTY_MISSING rows are never persisted).
  UnhealthyContainers container1 = new UnhealthyContainers();
  container1.setContainerId(1L);
  container1.setContainerState("MISSING");
  container1.setExpectedReplicaCount(3);
  container1.setActualReplicaCount(0);
  container1.setReplicaDelta(3);
  container1.setInStateSince(System.currentTimeMillis());
  UnhealthyContainers container2 = new UnhealthyContainers();
  container2.setContainerId(2L);
  container2.setContainerState("MISSING");
  container2.setExpectedReplicaCount(3);
  container2.setActualReplicaCount(0);
  container2.setReplicaDelta(3);
  container2.setInStateSince(System.currentTimeMillis());
  unHealthyContainersTableHandle.insert(container1);
  unHealthyContainersTableHandle.insert(container2);
  // Container 1 is non-empty (5 keys), container 2 is empty (0 keys).
  when(reconContainerMetadataManager.getKeyCountForContainer(1L)).thenReturn(5L);
  when(reconContainerMetadataManager.getKeyCountForContainer(2L)).thenReturn(0L);
  // Start the container health task
  ReconTaskConfig reconTaskConfig = new ReconTaskConfig();
  reconTaskConfig.setMissingContainerTaskInterval(Duration.ofSeconds(2));
  ContainerHealthTask containerHealthTask =
      new ContainerHealthTask(scmMock.getContainerManager(),
          scmMock.getScmServiceProvider(), containerHealthSchemaManager,
          placementMock, reconTaskConfig, reconContainerMetadataManager,
          new OzoneConfiguration(), getMockTaskStatusUpdaterManager());
  containerHealthTask.start();
  // Wait for the task to complete and ensure that updateContainerState is invoked for
  // container IDs 1 and 2 to mark the containers as DELETED, since they are DELETED in SCM.
  LambdaTestUtils.await(60000, 1000, () -> {
    verify(containerManagerMock, times(1))
        .updateContainerState(ContainerID.valueOf(1L), HddsProtos.LifeCycleEvent.DELETE);
    verify(containerManagerMock, times(1))
        .updateContainerState(ContainerID.valueOf(2L), HddsProtos.LifeCycleEvent.DELETE);
    return true;
  });
}
/**
 * Builds a mocked {@link ReconTaskStatusUpdaterManager} that hands back a
 * real {@link ReconTaskStatusUpdater} (wired to the test's status DAO) for
 * whichever task name the task under test requests.
 */
private ReconTaskStatusUpdaterManager getMockTaskStatusUpdaterManager() {
  ReconTaskStatusUpdaterManager manager =
      mock(ReconTaskStatusUpdaterManager.class);
  when(manager.getTaskStatusUpdater(anyString()))
      .thenAnswer(invocation -> new ReconTaskStatusUpdater(
          getDao(ReconTaskStatusDao.class), invocation.getArgument(0)));
  return manager;
}
/**
 * Creates one replica per requested state for the given container, each on a
 * random datanode, all sharing sequence id 1 and data checksum 1234.
 */
private Set<ContainerReplica> getMockReplicas(
    long containerId, State...states) {
  final ContainerID id = ContainerID.valueOf(containerId);
  final Set<ContainerReplica> result = new HashSet<>();
  for (State replicaState : states) {
    ContainerReplica replica = ContainerReplica.newBuilder()
        .setContainerID(id)
        .setContainerState(replicaState)
        .setDatanodeDetails(MockDatanodeDetails.randomDatanodeDetails())
        .setSequenceId(1)
        .setDataChecksum(1234L)
        .build();
    result.add(replica);
  }
  return result;
}
/**
 * Like {@code getMockReplicas}, but gives each replica a distinct data
 * checksum (1234, 1235, ...) so the container is seen as REPLICA_MISMATCH.
 */
private Set<ContainerReplica> getMockReplicasChecksumMismatch(
    long containerId, State...states) {
  final ContainerID id = ContainerID.valueOf(containerId);
  final Set<ContainerReplica> result = new HashSet<>();
  for (int i = 0; i < states.length; i++) {
    result.add(ContainerReplica.newBuilder()
        .setContainerID(id)
        .setContainerState(states[i])
        .setDatanodeDetails(MockDatanodeDetails.randomDatanodeDetails())
        .setSequenceId(1)
        .setDataChecksum(1234L + i)
        .build());
  }
  return result;
}
/**
 * Builds {@code num} mocked CLOSED Ratis/THREE containers with ids 1..num.
 */
private List<ContainerInfo> getMockContainers(int num) {
  List<ContainerInfo> result = new ArrayList<>(num);
  for (int id = 1; id <= num; id++) {
    ContainerInfo container = mock(ContainerInfo.class);
    when(container.getContainerID()).thenReturn((long) id);
    when(container.containerID()).thenReturn(ContainerID.valueOf(id));
    when(container.getReplicationConfig())
        .thenReturn(RatisReplicationConfig.getInstance(THREE));
    when(container.getReplicationFactor()).thenReturn(THREE);
    when(container.getState()).thenReturn(HddsProtos.LifeCycleState.CLOSED);
    result.add(container);
  }
  return result;
}
/**
 * Creates a single mocked container in the DELETED lifecycle state with the
 * given id, configured for Ratis THREE replication.
 */
private ContainerInfo getMockDeletedContainer(int containerID) {
  ContainerInfo deleted = mock(ContainerInfo.class);
  when(deleted.getState()).thenReturn(HddsProtos.LifeCycleState.DELETED);
  when(deleted.getContainerID()).thenReturn((long) containerID);
  when(deleted.containerID()).thenReturn(ContainerID.valueOf(containerID));
  when(deleted.getReplicationConfig())
      .thenReturn(RatisReplicationConfig.getInstance(THREE));
  return deleted;
}
/**
 * This is a simple implementation of PlacementPolicy, so that when
 * validateContainerPlacement() is called, by default it will return a valid
 * placement object. To get an invalid placement object, simply pass a UUID
 * of a datanode via setMisRepWhenDnPresent. If a DN with that UUID is passed
 * to validateContainerPlacement, then it will return an invalid placement.
 */
private static class MockPlacementPolicy implements
    PlacementPolicy {

  // When non-null, any replica list containing this datanode UUID is
  // reported as mis-replicated by validateContainerPlacement().
  private UUID misRepWhenDnPresent = null;

  public void setMisRepWhenDnPresent(UUID dn) {
    misRepWhenDnPresent = dn;
  }

  /** Unused by these tests; placement decisions are never requested. */
  @Override
  public List<DatanodeDetails> chooseDatanodes(
      List<DatanodeDetails> usedNodes, List<DatanodeDetails> excludedNodes,
      List<DatanodeDetails> favoredNodes,
      int nodesRequired, long metadataSizeRequired, long dataSizeRequired)
      throws IOException {
    return null;
  }

  /**
   * Returns an unsatisfied placement status (on 1 rack where 2 are expected,
   * 3 required) when the configured mis-replication datanode is present in
   * {@code dns}; otherwise a fully satisfied status (1 rack, 1 expected).
   */
  @Override
  public ContainerPlacementStatus validateContainerPlacement(
      List<DatanodeDetails> dns, int replicas) {
    if (isDnPresent(dns)) {
      return new ContainerPlacementStatusDefault(1, 2, 3);
    }
    return new ContainerPlacementStatusDefault(1, 1, 1);
  }

  @Override
  public Set<ContainerReplica> replicasToCopyToFixMisreplication(
      Map<ContainerReplica, Boolean> replicas) {
    return Collections.emptySet();
  }

  // NOTE(review): returns null rather than an empty set, unlike the method
  // above; callers in these tests apparently never invoke it.
  @Override
  public Set<ContainerReplica> replicasToRemoveToFixOverreplication(
      Set<ContainerReplica> replicas, int expectedCountPerUniqueReplica) {
    return null;
  }

  /**
   * Returns true if the configured mis-replication UUID matches any datanode
   * in {@code dns}. The null guard lives here once, instead of being
   * re-checked by the caller and again on every loop iteration.
   */
  private boolean isDnPresent(List<DatanodeDetails> dns) {
    if (misRepWhenDnPresent == null) {
      return false;
    }
    for (DatanodeDetails dn : dns) {
      if (misRepWhenDnPresent.equals(dn.getUuid())) {
        return true;
      }
    }
    return false;
  }
}
}
|
googleapis/google-cloud-java | 36,987 | java-managedkafka/proto-google-cloud-managedkafka-v1/src/main/java/com/google/cloud/managedkafka/v1/UpdateConnectorRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/managedkafka/v1/managed_kafka_connect.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.managedkafka.v1;
/**
*
*
* <pre>
* Request for UpdateConnector.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.UpdateConnectorRequest}
*/
public final class UpdateConnectorRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.managedkafka.v1.UpdateConnectorRequest)
UpdateConnectorRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateConnectorRequest.newBuilder() to construct.
private UpdateConnectorRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateConnectorRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateConnectorRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.managedkafka.v1.ManagedKafkaConnectProto
.internal_static_google_cloud_managedkafka_v1_UpdateConnectorRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.managedkafka.v1.ManagedKafkaConnectProto
.internal_static_google_cloud_managedkafka_v1_UpdateConnectorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.managedkafka.v1.UpdateConnectorRequest.class,
com.google.cloud.managedkafka.v1.UpdateConnectorRequest.Builder.class);
}
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* cluster resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* cluster resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* cluster resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int CONNECTOR_FIELD_NUMBER = 2;
private com.google.cloud.managedkafka.v1.Connector connector_;
/**
*
*
* <pre>
* Required. The connector to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Connector connector = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the connector field is set.
*/
@java.lang.Override
public boolean hasConnector() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The connector to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Connector connector = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The connector.
*/
@java.lang.Override
public com.google.cloud.managedkafka.v1.Connector getConnector() {
return connector_ == null
? com.google.cloud.managedkafka.v1.Connector.getDefaultInstance()
: connector_;
}
/**
*
*
* <pre>
* Required. The connector to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Connector connector = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.managedkafka.v1.ConnectorOrBuilder getConnectorOrBuilder() {
return connector_ == null
? com.google.cloud.managedkafka.v1.Connector.getDefaultInstance()
: connector_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getConnector());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getConnector());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.managedkafka.v1.UpdateConnectorRequest)) {
return super.equals(obj);
}
com.google.cloud.managedkafka.v1.UpdateConnectorRequest other =
(com.google.cloud.managedkafka.v1.UpdateConnectorRequest) obj;
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (hasConnector() != other.hasConnector()) return false;
if (hasConnector()) {
if (!getConnector().equals(other.getConnector())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
if (hasConnector()) {
hash = (37 * hash) + CONNECTOR_FIELD_NUMBER;
hash = (53 * hash) + getConnector().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.managedkafka.v1.UpdateConnectorRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request for UpdateConnector.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.UpdateConnectorRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.managedkafka.v1.UpdateConnectorRequest)
com.google.cloud.managedkafka.v1.UpdateConnectorRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.managedkafka.v1.ManagedKafkaConnectProto
.internal_static_google_cloud_managedkafka_v1_UpdateConnectorRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.managedkafka.v1.ManagedKafkaConnectProto
.internal_static_google_cloud_managedkafka_v1_UpdateConnectorRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.managedkafka.v1.UpdateConnectorRequest.class,
com.google.cloud.managedkafka.v1.UpdateConnectorRequest.Builder.class);
}
// Construct using com.google.cloud.managedkafka.v1.UpdateConnectorRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateMaskFieldBuilder();
getConnectorFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
connector_ = null;
if (connectorBuilder_ != null) {
connectorBuilder_.dispose();
connectorBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.managedkafka.v1.ManagedKafkaConnectProto
.internal_static_google_cloud_managedkafka_v1_UpdateConnectorRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.UpdateConnectorRequest getDefaultInstanceForType() {
return com.google.cloud.managedkafka.v1.UpdateConnectorRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.UpdateConnectorRequest build() {
com.google.cloud.managedkafka.v1.UpdateConnectorRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.UpdateConnectorRequest buildPartial() {
com.google.cloud.managedkafka.v1.UpdateConnectorRequest result =
new com.google.cloud.managedkafka.v1.UpdateConnectorRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.managedkafka.v1.UpdateConnectorRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.connector_ = connectorBuilder_ == null ? connector_ : connectorBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.managedkafka.v1.UpdateConnectorRequest) {
return mergeFrom((com.google.cloud.managedkafka.v1.UpdateConnectorRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.managedkafka.v1.UpdateConnectorRequest other) {
if (other == com.google.cloud.managedkafka.v1.UpdateConnectorRequest.getDefaultInstance())
return this;
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
if (other.hasConnector()) {
mergeConnector(other.getConnector());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getConnectorFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* cluster resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* cluster resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* cluster resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* cluster resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* cluster resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* cluster resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000001);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* cluster resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* cluster resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. Field mask is used to specify the fields to be overwritten in the
* cluster resource by the update. The fields specified in the update_mask are
* relative to the resource, not the full request. A field will be overwritten
* if it is in the mask. The mask is required and a value of * will update all
* fields.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private com.google.cloud.managedkafka.v1.Connector connector_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.managedkafka.v1.Connector,
com.google.cloud.managedkafka.v1.Connector.Builder,
com.google.cloud.managedkafka.v1.ConnectorOrBuilder>
connectorBuilder_;
/**
*
*
* <pre>
* Required. The connector to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Connector connector = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the connector field is set.
*/
public boolean hasConnector() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The connector to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Connector connector = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The connector.
*/
public com.google.cloud.managedkafka.v1.Connector getConnector() {
if (connectorBuilder_ == null) {
return connector_ == null
? com.google.cloud.managedkafka.v1.Connector.getDefaultInstance()
: connector_;
} else {
return connectorBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The connector to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Connector connector = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setConnector(com.google.cloud.managedkafka.v1.Connector value) {
if (connectorBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
connector_ = value;
} else {
connectorBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The connector to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Connector connector = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setConnector(
com.google.cloud.managedkafka.v1.Connector.Builder builderForValue) {
if (connectorBuilder_ == null) {
connector_ = builderForValue.build();
} else {
connectorBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The connector to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Connector connector = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeConnector(com.google.cloud.managedkafka.v1.Connector value) {
if (connectorBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& connector_ != null
&& connector_ != com.google.cloud.managedkafka.v1.Connector.getDefaultInstance()) {
getConnectorBuilder().mergeFrom(value);
} else {
connector_ = value;
}
} else {
connectorBuilder_.mergeFrom(value);
}
if (connector_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The connector to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Connector connector = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearConnector() {
bitField0_ = (bitField0_ & ~0x00000002);
connector_ = null;
if (connectorBuilder_ != null) {
connectorBuilder_.dispose();
connectorBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The connector to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Connector connector = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.managedkafka.v1.Connector.Builder getConnectorBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getConnectorFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The connector to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Connector connector = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.managedkafka.v1.ConnectorOrBuilder getConnectorOrBuilder() {
if (connectorBuilder_ != null) {
return connectorBuilder_.getMessageOrBuilder();
} else {
return connector_ == null
? com.google.cloud.managedkafka.v1.Connector.getDefaultInstance()
: connector_;
}
}
/**
*
*
* <pre>
* Required. The connector to update. Its `name` field must be populated.
* </pre>
*
* <code>
* .google.cloud.managedkafka.v1.Connector connector = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.managedkafka.v1.Connector,
com.google.cloud.managedkafka.v1.Connector.Builder,
com.google.cloud.managedkafka.v1.ConnectorOrBuilder>
getConnectorFieldBuilder() {
if (connectorBuilder_ == null) {
connectorBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.managedkafka.v1.Connector,
com.google.cloud.managedkafka.v1.Connector.Builder,
com.google.cloud.managedkafka.v1.ConnectorOrBuilder>(
getConnector(), getParentForChildren(), isClean());
connector_ = null;
}
return connectorBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.managedkafka.v1.UpdateConnectorRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.managedkafka.v1.UpdateConnectorRequest)
private static final com.google.cloud.managedkafka.v1.UpdateConnectorRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.managedkafka.v1.UpdateConnectorRequest();
}
public static com.google.cloud.managedkafka.v1.UpdateConnectorRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateConnectorRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateConnectorRequest>() {
@java.lang.Override
public UpdateConnectorRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateConnectorRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateConnectorRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.managedkafka.v1.UpdateConnectorRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,899 | java-dataform/proto-google-cloud-dataform-v1beta1/src/main/java/com/google/cloud/dataform/v1beta1/CreateWorkspaceRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataform/v1beta1/dataform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataform.v1beta1;
/**
*
*
* <pre>
* `CreateWorkspace` request message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1beta1.CreateWorkspaceRequest}
*/
public final class CreateWorkspaceRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataform.v1beta1.CreateWorkspaceRequest)
CreateWorkspaceRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateWorkspaceRequest.newBuilder() to construct.
private CreateWorkspaceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateWorkspaceRequest() {
parent_ = "";
workspaceId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateWorkspaceRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_CreateWorkspaceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_CreateWorkspaceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest.class,
com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The repository in which to create the workspace. Must be in the
* format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The repository in which to create the workspace. Must be in the
* format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int WORKSPACE_FIELD_NUMBER = 2;
private com.google.cloud.dataform.v1beta1.Workspace workspace_;
/**
*
*
* <pre>
* Required. The workspace to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Workspace workspace = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the workspace field is set.
*/
@java.lang.Override
public boolean hasWorkspace() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. The workspace to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Workspace workspace = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The workspace.
*/
@java.lang.Override
public com.google.cloud.dataform.v1beta1.Workspace getWorkspace() {
return workspace_ == null
? com.google.cloud.dataform.v1beta1.Workspace.getDefaultInstance()
: workspace_;
}
/**
*
*
* <pre>
* Required. The workspace to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Workspace workspace = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.dataform.v1beta1.WorkspaceOrBuilder getWorkspaceOrBuilder() {
return workspace_ == null
? com.google.cloud.dataform.v1beta1.Workspace.getDefaultInstance()
: workspace_;
}
public static final int WORKSPACE_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object workspaceId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the workspace, which will become the final
* component of the workspace's resource name.
* </pre>
*
* <code>string workspace_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The workspaceId.
*/
@java.lang.Override
public java.lang.String getWorkspaceId() {
java.lang.Object ref = workspaceId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
workspaceId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the workspace, which will become the final
* component of the workspace's resource name.
* </pre>
*
* <code>string workspace_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for workspaceId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getWorkspaceIdBytes() {
java.lang.Object ref = workspaceId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
workspaceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getWorkspace());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workspaceId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, workspaceId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getWorkspace());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(workspaceId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, workspaceId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest)) {
return super.equals(obj);
}
com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest other =
(com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasWorkspace() != other.hasWorkspace()) return false;
if (hasWorkspace()) {
if (!getWorkspace().equals(other.getWorkspace())) return false;
}
if (!getWorkspaceId().equals(other.getWorkspaceId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasWorkspace()) {
hash = (37 * hash) + WORKSPACE_FIELD_NUMBER;
hash = (53 * hash) + getWorkspace().hashCode();
}
hash = (37 * hash) + WORKSPACE_ID_FIELD_NUMBER;
hash = (53 * hash) + getWorkspaceId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* `CreateWorkspace` request message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1beta1.CreateWorkspaceRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1beta1.CreateWorkspaceRequest)
com.google.cloud.dataform.v1beta1.CreateWorkspaceRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_CreateWorkspaceRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_CreateWorkspaceRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest.class,
com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest.Builder.class);
}
// Construct using com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getWorkspaceFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
workspace_ = null;
if (workspaceBuilder_ != null) {
workspaceBuilder_.dispose();
workspaceBuilder_ = null;
}
workspaceId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dataform.v1beta1.DataformProto
.internal_static_google_cloud_dataform_v1beta1_CreateWorkspaceRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest getDefaultInstanceForType() {
return com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest build() {
com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest buildPartial() {
com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest result =
new com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.workspace_ = workspaceBuilder_ == null ? workspace_ : workspaceBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.workspaceId_ = workspaceId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest) {
return mergeFrom((com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest other) {
if (other == com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasWorkspace()) {
mergeWorkspace(other.getWorkspace());
}
if (!other.getWorkspaceId().isEmpty()) {
workspaceId_ = other.workspaceId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getWorkspaceFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
workspaceId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The repository in which to create the workspace. Must be in the
* format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The repository in which to create the workspace. Must be in the
* format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The repository in which to create the workspace. Must be in the
* format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The repository in which to create the workspace. Must be in the
* format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The repository in which to create the workspace. Must be in the
* format `projects/*/locations/*/repositories/*`.
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.cloud.dataform.v1beta1.Workspace workspace_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataform.v1beta1.Workspace,
com.google.cloud.dataform.v1beta1.Workspace.Builder,
com.google.cloud.dataform.v1beta1.WorkspaceOrBuilder>
workspaceBuilder_;
/**
*
*
* <pre>
* Required. The workspace to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Workspace workspace = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the workspace field is set.
*/
public boolean hasWorkspace() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The workspace to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Workspace workspace = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The workspace.
*/
public com.google.cloud.dataform.v1beta1.Workspace getWorkspace() {
if (workspaceBuilder_ == null) {
return workspace_ == null
? com.google.cloud.dataform.v1beta1.Workspace.getDefaultInstance()
: workspace_;
} else {
return workspaceBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The workspace to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Workspace workspace = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setWorkspace(com.google.cloud.dataform.v1beta1.Workspace value) {
if (workspaceBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
workspace_ = value;
} else {
workspaceBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The workspace to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Workspace workspace = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setWorkspace(
com.google.cloud.dataform.v1beta1.Workspace.Builder builderForValue) {
if (workspaceBuilder_ == null) {
workspace_ = builderForValue.build();
} else {
workspaceBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The workspace to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Workspace workspace = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeWorkspace(com.google.cloud.dataform.v1beta1.Workspace value) {
if (workspaceBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& workspace_ != null
&& workspace_ != com.google.cloud.dataform.v1beta1.Workspace.getDefaultInstance()) {
getWorkspaceBuilder().mergeFrom(value);
} else {
workspace_ = value;
}
} else {
workspaceBuilder_.mergeFrom(value);
}
if (workspace_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The workspace to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Workspace workspace = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearWorkspace() {
bitField0_ = (bitField0_ & ~0x00000002);
workspace_ = null;
if (workspaceBuilder_ != null) {
workspaceBuilder_.dispose();
workspaceBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The workspace to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Workspace workspace = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dataform.v1beta1.Workspace.Builder getWorkspaceBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getWorkspaceFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The workspace to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Workspace workspace = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dataform.v1beta1.WorkspaceOrBuilder getWorkspaceOrBuilder() {
if (workspaceBuilder_ != null) {
return workspaceBuilder_.getMessageOrBuilder();
} else {
return workspace_ == null
? com.google.cloud.dataform.v1beta1.Workspace.getDefaultInstance()
: workspace_;
}
}
/**
*
*
* <pre>
* Required. The workspace to create.
* </pre>
*
* <code>
* .google.cloud.dataform.v1beta1.Workspace workspace = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataform.v1beta1.Workspace,
com.google.cloud.dataform.v1beta1.Workspace.Builder,
com.google.cloud.dataform.v1beta1.WorkspaceOrBuilder>
getWorkspaceFieldBuilder() {
if (workspaceBuilder_ == null) {
workspaceBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dataform.v1beta1.Workspace,
com.google.cloud.dataform.v1beta1.Workspace.Builder,
com.google.cloud.dataform.v1beta1.WorkspaceOrBuilder>(
getWorkspace(), getParentForChildren(), isClean());
workspace_ = null;
}
return workspaceBuilder_;
}
private java.lang.Object workspaceId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the workspace, which will become the final
* component of the workspace's resource name.
* </pre>
*
* <code>string workspace_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The workspaceId.
*/
public java.lang.String getWorkspaceId() {
java.lang.Object ref = workspaceId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
workspaceId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the workspace, which will become the final
* component of the workspace's resource name.
* </pre>
*
* <code>string workspace_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for workspaceId.
*/
public com.google.protobuf.ByteString getWorkspaceIdBytes() {
java.lang.Object ref = workspaceId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
workspaceId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the workspace, which will become the final
* component of the workspace's resource name.
* </pre>
*
* <code>string workspace_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The workspaceId to set.
* @return This builder for chaining.
*/
public Builder setWorkspaceId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
workspaceId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the workspace, which will become the final
* component of the workspace's resource name.
* </pre>
*
* <code>string workspace_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearWorkspaceId() {
workspaceId_ = getDefaultInstance().getWorkspaceId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the workspace, which will become the final
* component of the workspace's resource name.
* </pre>
*
* <code>string workspace_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for workspaceId to set.
* @return This builder for chaining.
*/
public Builder setWorkspaceIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
workspaceId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1beta1.CreateWorkspaceRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataform.v1beta1.CreateWorkspaceRequest)
// Singleton default (all-fields-default) instance of the message; protobuf uses it for
// defaults, merging, and as the prototype for new builders.
private static final com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest();
}

/** Returns the shared immutable default instance of this message type. */
public static com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Wire-format parser. On failure it attaches the partially-built message to the
// exception so callers can inspect what was decoded before the error.
private static final com.google.protobuf.Parser<CreateWorkspaceRequest> PARSER =
    new com.google.protobuf.AbstractParser<CreateWorkspaceRequest>() {
      @java.lang.Override
      public CreateWorkspaceRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type callers expect.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

/** Returns the static parser for this message type. */
public static com.google.protobuf.Parser<CreateWorkspaceRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<CreateWorkspaceRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.dataform.v1beta1.CreateWorkspaceRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/geode | 37,016 | geode-wan/src/main/java/org/apache/geode/cache/wan/internal/GatewaySenderEventRemoteDispatcher.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.cache.wan.internal;
import java.io.IOException;
import java.util.HashSet;
import java.util.List;
import java.util.Set;
import java.util.Vector;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Consumer;
import java.util.regex.Pattern;
import org.apache.logging.log4j.Logger;
import org.apache.geode.CancelException;
import org.apache.geode.GemFireIOException;
import org.apache.geode.annotations.VisibleForTesting;
import org.apache.geode.cache.RegionDestroyedException;
import org.apache.geode.cache.client.ServerConnectivityException;
import org.apache.geode.cache.client.ServerOperationException;
import org.apache.geode.cache.client.internal.Connection;
import org.apache.geode.cache.client.internal.ExecutablePool;
import org.apache.geode.cache.client.internal.pooling.ConnectionDestroyedException;
import org.apache.geode.cache.wan.GatewayQueueEvent;
import org.apache.geode.cache.wan.GatewaySender;
import org.apache.geode.cache.wan.internal.client.locator.GatewaySenderBatchOp;
import org.apache.geode.cache.wan.internal.client.locator.SenderProxy;
import org.apache.geode.distributed.internal.ServerLocation;
import org.apache.geode.distributed.internal.ServerLocationAndMemberId;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.internal.cache.UpdateAttributesProcessor;
import org.apache.geode.internal.cache.tier.sockets.MessageTooLargeException;
import org.apache.geode.internal.cache.wan.AbstractGatewaySender;
import org.apache.geode.internal.cache.wan.AbstractGatewaySenderEventProcessor;
import org.apache.geode.internal.cache.wan.BatchException70;
import org.apache.geode.internal.cache.wan.GatewaySenderEventDispatcher;
import org.apache.geode.internal.cache.wan.GatewaySenderEventImpl;
import org.apache.geode.internal.cache.wan.GatewaySenderException;
import org.apache.geode.internal.cache.wan.GatewaySenderStats;
import org.apache.geode.logging.internal.log4j.api.LogService;
import org.apache.geode.pdx.PdxRegistryMismatchException;
import org.apache.geode.security.GemFireSecurityException;
/**
* @since GemFire 7.0
*/
public class GatewaySenderEventRemoteDispatcher implements GatewaySenderEventDispatcher {
private static final Logger logger = LogService.getLogger();
protected final AbstractGatewaySenderEventProcessor processor;
private volatile Connection connection;
private final Set<String> notFoundRegions = new HashSet<>();
private final Object notFoundRegionsSync = new Object();
private final AbstractGatewaySender sender;
private AckReaderThread ackReaderThread;
private final ReentrantReadWriteLock connectionLifeCycleLock = new ReentrantReadWriteLock();
protected static final String maxAttemptsReachedConnectingServerIdExceptionMessage =
"Reached max attempts number trying to connect to desired server id";
/*
* Called after each attempt at processing an outbound (dispatch) or inbound (ack)
* message, whether the attempt is successful or not. The purpose is testability.
* Without this hook, negative tests, can't ensure that message processing was
* attempted, so they wouldn't know how long to wait for some sort of failure.
*/
public static volatile Consumer<Boolean> messageProcessingAttempted = isAck -> {
};
/**
* This count is reset to 0 each time a successful connection is made.
*/
private int failedConnectCount = 0;
private static final int RETRY_WAIT_TIME = 100;
/** Test hook: injects the {@link AckReaderThread} used to read batch acknowledgements. */
void setAckReaderThread(AckReaderThread ackReaderThread) {
  this.ackReaderThread = ackReaderThread;
}
/**
 * Creates a dispatcher for the given event processor and eagerly attempts the first
 * connection to the remote WAN site.
 *
 * @param eventProcessor the processor whose batches this dispatcher will send
 */
public GatewaySenderEventRemoteDispatcher(AbstractGatewaySenderEventProcessor eventProcessor) {
  processor = eventProcessor;
  sender = eventProcessor.getSender();
  try {
    initializeConnection();
  } catch (GatewaySenderException e) {
    // It is ok to ignore this exception. It is logged in the initializeConnection call.
  }
}
/**
 * Package-private constructor (used by tests) that adopts an already-established
 * connection instead of creating one.
 */
GatewaySenderEventRemoteDispatcher(AbstractGatewaySenderEventProcessor processor,
    Connection connection) {
  this.processor = processor;
  sender = processor.getSender();
  this.connection = connection;
}
/**
 * Reads a single batch acknowledgement from the remote receiver.
 *
 * <p>Returns {@code null} when no ack could be read. Recoverable failures cause a short
 * sleep so the caller (the ack reader loop) can retry; fatal ones stop the processor.
 */
protected GatewayAck readAcknowledgement() {
  SenderProxy sp = new SenderProxy(processor.getSender().getProxy());
  GatewayAck ack = null;
  Exception ex;
  try {
    connection = getConnection(false);
    if (logger.isDebugEnabled()) {
      logger.debug(" Receiving ack on the thread {}", connection);
    }
    // Read lock: concurrent readers are fine, but we must not read while the
    // connection is being destroyed/replaced under the write lock.
    getConnectionLifeCycleLock().readLock().lock();
    try {
      if (connection != null && !processor.isStopped()) {
        ack = (GatewayAck) sp.receiveAckFromReceiver(connection);
      }
    } finally {
      getConnectionLifeCycleLock().readLock().unlock();
    }
  } catch (Exception e) {
    Throwable t = e.getCause();
    if (t instanceof BatchException70) {
      // A BatchException has occurred.
      // Do not process the connection as dead since it is not dead.
      ex = (BatchException70) t;
    } else if (e instanceof GatewaySenderException) { // This Exception is thrown from
      // getConnection
      ex = (Exception) e.getCause();
    } else {
      ex = e;
      // keep using the connection if we had a batch exception. Else, destroy
      // it
      destroyConnection();
    }
    if (sender.getProxy() == null || sender.getProxy().isDestroyed()) {
      // if our pool is shutdown then just be silent
    } else if (RecoverableExceptionPredicates.isRecoverableWhenReadingAck(ex)) {
      sleepBeforeRetry();
    } else {
      logAndStopProcessor(ex);
    }
  } finally {
    // Test hook: signal that an ack-read attempt (successful or not) completed.
    messageProcessingAttempted.accept(true);
  }
  return ack;
}
/**
 * Dispatches a batch of events to the remote receiver, recording batch statistics.
 *
 * @param events the events to send
 * @param removeFromQueueOnException present to satisfy the interface; the equivalent
 *        sender-level flag is consulted inside {@link #_dispatchBatch}
 * @param isRetry whether this batch was dispatched before
 * @return {@code true} if the batch was sent successfully
 */
@Override
public boolean dispatchBatch(List events, boolean removeFromQueueOnException, boolean isRetry) {
  GatewaySenderStats statistics = sender.getStatistics();
  boolean success = false;
  try {
    long start = statistics.startTime();
    success = _dispatchBatch(events, isRetry);
    if (success) {
      statistics.endBatch(start, events.size());
    }
  } catch (GatewaySenderException ge) {
    Throwable t = ge.getCause();
    if (sender.getProxy() == null || sender.getProxy().isDestroyed()) {
      // if our pool is shutdown then just be silent
    } else if (RecoverableExceptionPredicates.isRecoverableWhenDispatchingBatch(t)) {
      // Transient failure: back off briefly and let the processor retry the batch.
      processor.handleException();
      sleepBeforeRetry();
      if (logger.isDebugEnabled()) {
        logger.debug(
            "Failed to dispatch a batch with id {} due to non-fatal exception {}. Retrying in {} ms",
            processor.getBatchId(), t, RETRY_WAIT_TIME);
      }
    } else {
      logAndStopProcessor(ge);
    }
  } catch (CancelException e) {
    // Cancellation must propagate so the caller can shut down promptly.
    logAndStopProcessor(e);
    throw e;
  } catch (Exception e) {
    logAndStopProcessor(e);
  } finally {
    // Test hook: signal that a dispatch attempt (successful or not) completed.
    messageProcessingAttempted.accept(false);
  }
  return success;
}
/**
 * Performs the actual wire dispatch of a batch over the current connection.
 *
 * <p>All failures are normalized into {@link GatewaySenderException}. The connection is kept
 * alive for batch-level ({@link BatchException70}) and message-size
 * ({@link MessageTooLargeException}) errors — the connection itself is healthy in those cases
 * — and destroyed for everything else.
 *
 * @return {@code true} on success; {@code false} if the connection was reset underneath us
 *         (a new connection implies a new batch id)
 */
private boolean _dispatchBatch(List events, boolean isRetry) {
  Exception ex = null;
  int currentBatchId = processor.getBatchId();
  connection = getConnection(true);
  int batchIdForThisConnection = processor.getBatchId();
  GatewaySenderStats statistics = sender.getStatistics();
  // This means we are writing to a new connection than the previous batch.
  // i.e The connection has been reset. It also resets the batchId.
  if (currentBatchId != batchIdForThisConnection || processor.isConnectionReset()) {
    return false;
  }
  try {
    // NOTE(review): this re-check looks unreachable — isConnectionReset() returning true
    // above already caused an early return; confirm whether the flag can flip in between.
    if (processor.isConnectionReset()) {
      isRetry = true;
    }
    SenderProxy sp = new SenderProxy(sender.getProxy());
    // Read lock: hold off concurrent connection destruction while writing the batch.
    getConnectionLifeCycleLock().readLock().lock();
    try {
      if (connection != null && !connection.isDestroyed()) {
        sp.dispatchBatch_NewWAN(connection, events, currentBatchId,
            sender.isRemoveFromQueueOnException(), isRetry);
        if (logger.isDebugEnabled()) {
          logger.debug(
              "{} : Dispatched batch (id={}) of {} events, queue size: {} on connection {}",
              processor.getSender(), currentBatchId, events.size(),
              processor.getQueue().size(), connection);
        }
      } else {
        throw new ConnectionDestroyedException();
      }
    } finally {
      getConnectionLifeCycleLock().readLock().unlock();
    }
    return true;
  } catch (ServerOperationException e) {
    Throwable t = e.getCause();
    if (t instanceof BatchException70) {
      // A BatchException has occurred.
      // Do not process the connection as dead since it is not dead.
      ex = (BatchException70) t;
    } else {
      ex = e;
      // keep using the connection if we had a batch exception. Else, destroy it
      destroyConnection();
    }
    throw new GatewaySenderException(
        String.format("%s : Exception during processing batch %s on connection %s",
            this, currentBatchId, connection),
        ex);
  } catch (GemFireIOException e) {
    Throwable t = e.getCause();
    if (t instanceof MessageTooLargeException) {
      // A MessageTooLargeException has occurred.
      // Do not process the connection as dead since it is not dead.
      ex = (MessageTooLargeException) t;
      // Reduce the batch size by half of the configured batch size or number of events in the
      // current batch (whichever is less)
      int newBatchSize = Math.min(events.size(), processor.getBatchSize()) / 2;
      logger.warn(String.format(
          "The following exception occurred attempting to send a batch of %s events. The batch will be tried again after reducing the batch size to %s events.",
          events.size(), newBatchSize),
          e);
      processor.setBatchSize(newBatchSize);
      statistics.incBatchesResized();
    } else {
      ex = e;
      // keep using the connection if we had a MessageTooLargeException. Else, destroy it
      destroyConnection();
    }
    throw new GatewaySenderException(
        String.format("%s : Exception during processing batch %s on connection %s",
            this, currentBatchId, connection),
        ex);
  } catch (IllegalStateException e) {
    // e.g. an off-heap event serialized after being freed; record it on the processor too.
    processor.setException(new GatewaySenderException(e));
    throw new GatewaySenderException(
        String.format("%s : Exception during processing batch %s on connection %s",
            this, currentBatchId, connection),
        e);
  } catch (Exception e) {
    // An Exception has occurred. Get its cause.
    Throwable t = e.getCause();
    if (t instanceof IOException) {
      // An IOException has occurred.
      ex = (IOException) t;
    } else {
      ex = e;
    }
    // the cause is not going to be BatchException70. So, destroy the connection
    destroyConnection();
    throw new GatewaySenderException(
        String.format("%s : Exception during processing batch %s on connection %s",
            this, currentBatchId, connection),
        ex);
  }
}
/** Exposes the lock guarding connection create/destroy for tests and subclass-free mocking. */
@VisibleForTesting
ReentrantReadWriteLock getConnectionLifeCycleLock() {
  return connectionLifeCycleLock;
}
/**
 * Acquires or adds a new <code>Connection</code> to the corresponding <code>Gateway</code>
 *
 * <p>For serial senders the connection is re-initialized whenever it is missing, destroyed,
 * or no longer points at the server location recorded on the sender; parallel senders only
 * require a live connection. When this member is primary, the ack reader thread is also
 * (re)started before returning.
 *
 * @param startAckReaderThread kept for interface symmetry; the ack reader is started based
 *        on primary status rather than this flag
 * @return the <code>Connection</code>, or {@code null} if the processor is stopped
 *
 */
public Connection getConnection(boolean startAckReaderThread) throws GatewaySenderException {
  if (processor.isStopped()) {
    stop();
    return null;
  }
  // IF the connection is null
  // OR the connection's ServerLocation doesn't match with the one stored in sender
  // THEN initialize the connection
  if (!sender.isParallel()) {
    boolean needToReconnect = false;
    // Read lock: take a consistent view of the connection while deciding.
    getConnectionLifeCycleLock().readLock().lock();
    try {
      needToReconnect = connection == null || connection.isDestroyed()
          || connection.getServer() == null
          || !connection.getServer().equals(sender.getServerLocation());
    } finally {
      getConnectionLifeCycleLock().readLock().unlock();
    }
    if (needToReconnect) {
      if (logger.isDebugEnabled()) {
        logger.debug(
            "Initializing new connection as serverLocation of old connection is : {} and the serverLocation to connect is {}",
            ((connection == null) ? "null" : connection.getServer()),
            sender.getServerLocation());
      }
      // Initialize the connection
      initializeConnection();
    }
  } else {
    if (connection == null || connection.isDestroyed()) {
      initializeConnection();
    }
  }
  // Here we might wait on a connection to another server if I was secondary
  // so don't start waiting until I am primary
  InternalCache cache = sender.getCache();
  if (cache != null && !cache.isClosed()) {
    if (sender.isPrimary() && (connection != null)) {
      if (ackReaderThread == null || !ackReaderThread.isRunning()) {
        ackReaderThread = new AckReaderThread(sender, processor);
        ackReaderThread.start();
        ackReaderThread.waitForRunningAckReaderThreadRunningState();
      }
    }
  }
  return connection;
}
/**
 * Destroys the current connection (if any), returns it to the pool, and clears both the
 * connection and the sender's recorded server location so the next dispatch reconnects.
 */
public void destroyConnection() {
  // Write lock: exclude dispatchers/ack readers holding the read lock during the teardown.
  getConnectionLifeCycleLock().writeLock().lock();
  try {
    Connection con = connection;
    if (con != null) {
      if (!con.isDestroyed()) {
        con.destroy();
        sender.getProxy().returnConnection(con);
      }
      // Reset the connection so the next time through a new one will be
      // obtained
      connection = null;
      sender.setServerLocation(null);
    }
  } finally {
    getConnectionLifeCycleLock().writeLock().unlock();
  }
}
/**
 * Ensures this dispatcher connects to the same receiver as its sibling dispatcher threads
 * when {@code enforce-threads-connect-same-receiver} is enabled.
 *
 * <p>The first dispatcher to connect records its receiver's unique id as the expected one.
 * Later dispatchers discard mismatching connections and re-acquire until they reach that
 * receiver; each previously unseen (unexpected) receiver grants five additional attempts so
 * that every live receiver can be sampled.
 *
 * @param con the connection acquired so far; destroyed and replaced while it points at the
 *        wrong receiver
 * @return a connection to the expected receiver
 * @throws ServerConnectivityException if the expected receiver is not reached within the
 *         allowed number of attempts
 */
Connection retryInitializeConnection(Connection con) {
  final boolean isDebugEnabled = logger.isDebugEnabled();
  String connectedServerId = con.getEndpoint().getMemberId().getUniqueId();
  String expectedServerId = processor.getExpectedReceiverUniqueId();
  if (expectedServerId.isEmpty()) {
    // First dispatcher in: whatever receiver we reached becomes the expected one.
    if (isDebugEnabled) {
      logger.debug("First dispatcher connected to server " + connectedServerId);
    }
    processor.setExpectedReceiverUniqueId(connectedServerId);
    return con;
  }
  int attempt = 0;
  final int attemptsPerServer = 5;
  int maxAttempts = attemptsPerServer;
  // Unexpected receivers already seen. A HashSet replaces the original legacy Vector:
  // this method runs on a single thread, and the set gives O(1) membership checks.
  Set<String> notExpectedServerIds = new HashSet<>();
  boolean connectedToExpectedReceiver = connectedServerId.equals(expectedServerId);
  while (!connectedToExpectedReceiver) {
    if (isDebugEnabled) {
      logger.debug("Dispatcher wants to connect to [" + expectedServerId
          + "] but got connection to [" + connectedServerId + "]");
    }
    attempt++;
    // Set.add returns true only for a receiver not seen before (contains+add in one step).
    if (notExpectedServerIds.add(connectedServerId)) {
      if (isDebugEnabled) {
        logger.debug(
            "Increasing dispatcher connection max retries number due to connection to unknown server ("
                + connectedServerId + ")");
      }
      maxAttempts += attemptsPerServer;
    }
    if (attempt >= maxAttempts) {
      throw new ServerConnectivityException(maxAttemptsReachedConnectingServerIdExceptionMessage
          + " [" + expectedServerId + "] (" + maxAttempts + " attempts).");
    }
    // Wrong receiver: tear this connection down, hand it back, and try again.
    con.destroy();
    sender.getProxy().returnConnection(con);
    con = sender.getProxy().acquireConnection();
    connectedServerId = con.getEndpoint().getMemberId().getUniqueId();
    if (connectedServerId.equals(expectedServerId)) {
      connectedToExpectedReceiver = true;
    }
  }
  if (isDebugEnabled) {
    logger.debug("Dispatcher connected to expected endpoint " + connectedServerId
        + " after " + attempt + " retries.");
  }
  return con;
}
/**
 * Initializes the <code>Connection</code>.
 *
 * <p>Held under the write lock so no dispatch or ack read overlaps the swap. For a serial
 * sender the acquire is additionally serialized across concurrent dispatcher threads and,
 * when configured, pinned to the receiver the first dispatcher reached.
 *
 * @throws GatewaySenderException if no connection could be established
 * @throws GemFireSecurityException if authentication with the receiver fails
 */
@VisibleForTesting
void initializeConnection() throws GatewaySenderException, GemFireSecurityException {
  final boolean isDebugEnabled = logger.isDebugEnabled();
  // Unblock an ack reader that may be stuck reading on the connection being replaced.
  if (ackReaderThread != null) {
    ackReaderThread.shutDownAckReaderConnection(connection);
  }
  getConnectionLifeCycleLock().writeLock().lock();
  try {
    // Attempt to acquire a connection
    if (sender.getProxy() == null || sender.getProxy().isDestroyed()) {
      sender.initProxy();
    } else {
      processor.resetBatchId();
    }
    Connection con;
    try {
      if (sender.isParallel()) {
        /*
         * TODO - The use of acquireConnection should be removed from the gateway code. This
         * method is fine for tests, but these connections should really be managed inside the
         * pool code. If the gateway needs to persistent connection to a single server, which
         * should create have the OpExecutor that holds a reference to the connection. Use {@link
         * ExecutablePool#setupServerAffinity(boolean)} for gateway code
         */
        con = sender.getProxy().acquireConnection();
        // For parallel sender, setting server location will not matter.
        // everytime it will ask for acquire connection whenever it needs it. I
        // am saving this server location for command purpose
        sender.setServerLocation(con.getServer());
      } else {
        synchronized (sender.getLockForConcurrentDispatcher()) {
          ServerLocation server = sender.getServerLocation();
          if (server != null) {
            if (isDebugEnabled) {
              logger.debug("ServerLocation is: {}. Connecting to this serverLocation...", server);
            }
            con = sender.getProxy().acquireConnection(server);
          } else {
            if (isDebugEnabled) {
              logger.debug("ServerLocation is null. Creating new connection. ");
            }
            con = sender.getProxy().acquireConnection();
          }
          if (sender.getEnforceThreadsConnectSameReceiver()) {
            // All dispatcher threads must land on the same receiver member.
            con = retryInitializeConnection(con);
          }
          if (sender.isPrimary()) {
            if (sender.getServerLocation() == null) {
              sender.setServerLocation(con.getServer());
            }
            new UpdateAttributesProcessor(sender).distribute(false);
          }
        }
      }
    } catch (ServerConnectivityException e) {
      // Get the exception to throw
      GatewaySenderException gse = getInitializeConnectionExceptionToThrow(e);
      // Set the serverLocation to null so that a new connection can be obtained in next attempt
      sender.setServerLocation(null);
      // Log the exception if necessary
      if (logConnectionFailure()) {
        // only log this message once; another msg is logged once we connect
        logger.warn("{} : Could not connect due to: {}",
            processor.getSender().getId(), gse.getCause().getMessage());
      }
      // Increment failed connection count
      failedConnectCount++;
      // Throw the exception
      throw gse;
    }
    if (failedConnectCount > 0) {
      Object[] logArgs =
          new Object[] {processor.getSender().getId(), con, failedConnectCount};
      logger.info("{}: Using {} after {} failed connect attempts",
          logArgs);
      failedConnectCount = 0;
    } else {
      Object[] logArgs = new Object[] {processor.getSender().getId(), con};
      logger.info("{}: Using {}", logArgs);
    }
    connection = con;
    // The receiver reports how much of the PDX registry it has; resend the rest if needed.
    processor.checkIfPdxNeedsResend(connection.getQueueStatus().getPdxSize());
  } catch (ConnectionDestroyedException e) {
    throw new GatewaySenderException(
        String.format("%s : Could not connect due to: %s",
            processor.getSender().getId(), e.getMessage()),
        e);
  } finally {
    getConnectionLifeCycleLock().writeLock().unlock();
  }
}
/**
 * Builds the {@link GatewaySenderException} to surface when establishing a connection fails.
 *
 * <p>A security failure is wrapped directly (preserving its cause); any other connectivity
 * failure produces a message listing the currently known live servers — or stating that none
 * exist — to aid troubleshooting.
 *
 * @param e the connectivity failure raised while acquiring a connection
 * @return the exception for the caller to throw; never {@code null}
 */
private GatewaySenderException getInitializeConnectionExceptionToThrow(
    ServerConnectivityException e) {
  if (e.getCause() instanceof GemFireSecurityException) {
    // Authentication/authorization problem: keep the security cause intact.
    return new GatewaySenderException(e.getCause());
  }
  List<ServerLocationAndMemberId> servers = sender.getProxy().getCurrentServers();
  String ioMsg;
  if (servers.isEmpty()) {
    ioMsg = "There are no active servers.";
  } else {
    final StringBuilder buffer = new StringBuilder();
    for (ServerLocationAndMemberId server : servers) {
      if (buffer.length() > 0) {
        buffer.append(", ");
      }
      buffer.append(server);
    }
    ioMsg = String.format(
        "No available connection was found, but the following active servers exist: %s",
        buffer);
  }
  // Append the "max attempts reached" detail verbatim when present. A substring check
  // replaces the original per-call Pattern.compile(msg + ".*").find(): the two are
  // equivalent here because the message constant contains no regex metacharacters.
  if (sender.getEnforceThreadsConnectSameReceiver() && e.getMessage() != null
      && e.getMessage().contains(maxAttemptsReachedConnectingServerIdExceptionMessage)) {
    ioMsg += " " + e.getMessage();
  }
  IOException ex = new IOException(ioMsg);
  return new GatewaySenderException(
      String.format("%s : Could not connect due to: %s",
          processor.getSender().getId(), ex.getMessage()),
      ex);
}
/**
 * Decides whether the current connect failure should be logged, throttling repeats.
 */
protected boolean logConnectionFailure() {
  // Always log when debugging, and always log the very first failure.
  if (logger.isDebugEnabled() || failedConnectCount == 0) {
    return true;
  }
  // Throttle subsequent failures to the 30th, 300th, and then every 3000th attempt.
  // At ~100ms per retry from the layer above, that is roughly 3s, 30s, then every 5 minutes.
  if (failedConnectCount >= 3000) {
    return failedConnectCount % 3000 == 0;
  }
  return failedConnectCount == 30 || failedConnectCount == 300;
}
/**
 * Immutable value object representing a batch acknowledgement from the remote receiver:
 * either a success (batch id + number of events) or a failure carrying a
 * {@link BatchException70}.
 *
 * <p>All fields are {@code final}: an ack is created on one thread and consumed by the
 * {@link AckReaderThread}, and final fields guarantee safe publication.
 */
public static class GatewayAck {
  private final int batchId;
  private final int numEvents;
  private final BatchException70 be;

  /** Failure ack: carries the batch exception reported by the receiver. */
  public GatewayAck(BatchException70 be, int bId) {
    this.be = be;
    batchId = bId;
    numEvents = 0;
  }

  /** Success ack: records how many events of the batch were acknowledged. */
  public GatewayAck(int batchId, int numEvents) {
    this.batchId = batchId;
    this.numEvents = numEvents;
    be = null;
  }

  /**
   * @return the numEvents
   */
  public int getNumEvents() {
    return numEvents;
  }

  /**
   * @return the batchId
   */
  public int getBatchId() {
    return batchId;
  }

  /** @return the batch exception, or {@code null} for a successful ack */
  public BatchException70 getBatchException() {
    return be;
  }
}
class AckReaderThread extends Thread {
// Guards ackReaderThreadRunning; waiters block here until the thread reports it started.
private final Object runningStateLock = new Object();
/**
 * boolean to make a shutdown request
 */
private volatile boolean shutdown = false;
// Cache whose cancel criterion doubles as a shutdown signal for the read loop.
private final InternalCache cache;
// True while run() is executing its loop; volatile so isRunning() sees it cross-thread.
private volatile boolean ackReaderThreadRunning = false;

/** Creates an ack reader named after the given processor. */
public AckReaderThread(GatewaySender sender, AbstractGatewaySenderEventProcessor processor) {
  this(sender, processor.getName());
}

/** @return whether shutdown has been requested */
boolean isShutdown() {
  return shutdown;
}

/** Creates a daemon ack-reader thread for the given sender. */
public AckReaderThread(GatewaySender sender, String name) {
  super("AckReaderThread for : " + name);
  setDaemon(true);
  cache = ((AbstractGatewaySender) sender).getCache();
}
/**
 * Blocks the caller until the ack reader's run loop has started (or shutdown was
 * requested, or the caller is interrupted).
 */
public void waitForRunningAckReaderThreadRunningState() {
  synchronized (runningStateLock) {
    // Loop guards against spurious wakeups; run() notifies once it flips the flag.
    while (!ackReaderThreadRunning) {
      try {
        if (shutdown) {
          break;
        }
        runningStateLock.wait();
      } catch (InterruptedException e) {
        // Preserve the interrupt for the caller and stop waiting.
        Thread.currentThread().interrupt();
        break;
      }
    }
  }
}
/** Returns {@code true} when shutdown was requested or the cache is cancelling. */
private boolean checkCancelled() {
  return shutdown || cache.getCancelCriterion().isCancelInProgress();
}
/**
 * Main loop: repeatedly reads batch acks and applies them to the processor until
 * cancelled. A fatal exception stops the processor and clears temporary events.
 */
@Override
public void run() {
  if (logger.isDebugEnabled()) {
    logger.debug("AckReaderThread started.. ");
  }
  // Announce the running state so waitForRunningAckReaderThreadRunningState() unblocks.
  synchronized (runningStateLock) {
    ackReaderThreadRunning = true;
    runningStateLock.notifyAll();
  }
  try {
    for (;;) {
      if (checkCancelled()) {
        break;
      }
      GatewayAck ack = readAcknowledgement();
      if (ack != null) {
        boolean gotBatchException = ack.getBatchException() != null;
        int batchId = ack.getBatchId();
        int numEvents = ack.getNumEvents();
        // If the batch is successfully processed, remove it from the
        // queue.
        if (gotBatchException) {
          logger.warn(
              "Gateway Sender {} : Received ack for batch id {} with one or more exceptions",
              processor.getSender(), ack.getBatchId());
          // If we get PDX related exception in the batch exception then try
          // to resend all the pdx events as well in the next batch.
          final GatewaySenderStats statistics = sender.getStatistics();
          statistics.incBatchesRedistributed();
          // NOTE(review): both branches below are currently identical — the batch is
          // acked and removed regardless of isRemoveFromQueueOnException; confirm intent.
          if (sender.isRemoveFromQueueOnException()) {
            // log the batchExceptions
            logBatchExceptions(ack.getBatchException());
            processor.handleSuccessBatchAck(batchId);
          } else {
            // log the batchExceptions. These are exceptions that were not retried on the remote
            // site (e.g. NotAuthorizedException)
            // @TODO Shoud anything else be done here to warn that events are lost even though
            // the boolean is false
            logBatchExceptions(ack.getBatchException());
            processor.handleSuccessBatchAck(batchId);
          }
        } // unsuccessful batch
        else { // The batch was successful.
          if (logger.isDebugEnabled()) {
            logger.debug("Gateway Sender {} : Received ack for batch id {} of {} events",
                processor.getSender(), ack.getBatchId(), ack.getNumEvents());
          }
          processor.handleSuccessBatchAck(batchId);
        }
      } else {
        // If we have received IOException.
        if (logger.isDebugEnabled()) {
          logger.debug("{}: Received null ack from remote site.", processor.getSender());
        }
        processor.handleException();
        // Check if canceled before sleeping
        if (checkCancelled()) {
          break;
        }
        try { // This wait is before trying to getting new connection to
              // receive ack. Without this there will be continuous call to
              // getConnection
          Thread.sleep(GatewaySender.CONNECTION_RETRY_INTERVAL);
        } catch (InterruptedException e) {
          Thread.currentThread().interrupt();
        }
      }
    }
  } catch (Exception e) {
    if (!checkCancelled()) {
      logger.fatal(
          "Stopping the processor because the following exception occurred while processing a batch:",
          e);
    } else {
      return;
    }
    // Stop processing under the sender life-cycle lock so start/stop cannot interleave.
    sender.getLifeCycleLock().writeLock().lock();
    try {
      processor.stopProcessing();
      sender.clearTempEventsAfterSenderStopped();
    } finally {
      sender.getLifeCycleLock().writeLock().unlock();
    }
    // destroyConnection();
  } finally {
    if (logger.isDebugEnabled()) {
      logger.debug("AckReaderThread exiting. ");
    }
    ackReaderThreadRunning = false;
  }
}
/**
 * Logs every sub-exception of a batch ack, plus the specific event being processed when
 * each occurred. Region-destroyed failures are logged only once per region; PDX type
 * failures additionally mark the batch's PDX events un-acked so they are resent.
 * Never propagates: any error while logging is itself caught and logged.
 */
protected void logBatchExceptions(BatchException70 exception) {
  try {
    for (BatchException70 be : exception.getExceptions()) {
      boolean logWarning = true;
      if (be.getCause() instanceof RegionDestroyedException) {
        RegionDestroyedException rde = (RegionDestroyedException) be.getCause();
        // Suppress repeat warnings for a region we've already reported as missing.
        synchronized (notFoundRegionsSync) {
          if (notFoundRegions.contains(rde.getRegionFullPath())) {
            logWarning = false;
          } else {
            notFoundRegions.add(rde.getRegionFullPath());
          }
        }
      } else if (be.getCause() instanceof IllegalStateException
          && be.getCause().getMessage().contains("Unknown pdx type")) {
        List<GatewaySenderEventImpl> pdxEvents =
            processor.getBatchIdToPDXEventsMap().get(be.getBatchId());
        if (logWarning) {
          logger.warn(String.format(
              "A BatchException occurred processing PDX events. Index of array of Exception : %s",
              be.getIndex()),
              be);
        }
        if (pdxEvents != null) {
          // Un-ack all PDX events of this batch so they are redistributed next time.
          for (GatewaySenderEventImpl senderEvent : pdxEvents) {
            senderEvent.setAcked(false);
          }
          GatewaySenderEventImpl gsEvent = pdxEvents.get(be.getIndex());
          if (logWarning) {
            logger.warn("The event being processed when the BatchException occurred was: {}",
                gsEvent);
          }
        }
        // PDX case fully handled; skip the generic logging below.
        continue;
      }
      if (logWarning) {
        logger.warn(
            String.format(
                "A BatchException occurred processing events. Index of Array of Exception : %s",
                be.getIndex()),
            be);
      }
      List<GatewaySenderEventImpl>[] eventsArr =
          processor.getBatchIdToEventsMap().get(be.getBatchId());
      if (eventsArr != null) {
        // Index 1 holds the filtered (actually dispatched) events for the batch.
        List<GatewaySenderEventImpl> filteredEvents = eventsArr[1];
        GatewaySenderEventImpl gsEvent =
            filteredEvents.get(be.getIndex());
        if (logWarning) {
          logger.warn("The event being processed when the BatchException occurred was: {}",
              gsEvent);
        }
      }
    }
  } catch (Exception e) {
    logger.warn(
        "An unexpected exception occurred processing a BatchException. The thread will continue.",
        e);
  }
}
/** @return whether the ack reader's run loop is currently executing */
boolean isRunning() {
  return ackReaderThreadRunning;
}
/**
 * Requests shutdown: destroys the connection to unblock a pending read, sets the
 * shutdown flag, then waits up to 15s for the thread to exit (preserving the caller's
 * interrupt status). Logs a warning if the thread ignores the cancellation.
 */
public void shutdown() {
  // we need to destroy connection irrespective of we are listening on it or
  // not. No need to take lock as the reader thread may be blocked and we might not
  // get chance to destroy unless that returns.
  Connection conn = connection;
  if (conn != null) {
    shutDownAckReaderConnection(conn);
    if (!conn.isDestroyed()) {
      conn.destroy();
      sender.getProxy().returnConnection(conn);
    }
  }
  shutdown = true;
  boolean interrupted = Thread.interrupted();
  try {
    join(15 * 1000);
  } catch (InterruptedException e) {
    interrupted = true;
  } finally {
    if (interrupted) {
      // Restore the interrupt flag cleared above or raised during join().
      Thread.currentThread().interrupt();
    }
  }
  if (isAlive()) {
    logger.warn("AckReaderThread ignored cancellation");
  }
}
/**
 * Attempts to unblock the ack reader by closing the given connection's input stream,
 * in case the reader is stuck in a blocking read. Safe to call with {@code null}.
 *
 * <p>The parameter was renamed from {@code connection} to {@code conn}: the old name
 * shadowed the dispatcher's {@code connection} field and was immediately aliased into a
 * redundant local, which obscured which connection was being shut down.
 *
 * @param conn the ack-reader connection to shut down; may be {@code null}
 */
protected void shutDownAckReaderConnection(Connection conn) {
  // attempt to unblock the ackReader thread by shutting down the inputStream, if it was stuck
  // on a read
  try {
    if (conn != null && conn.getInputStream() != null) {
      conn.getInputStream().close();
    }
  } catch (IOException e) {
    logger.warn("Unable to shutdown AckReaderThread Connection");
  } catch (ConnectionDestroyedException e) {
    logger.info("AckReader shutting down and connection already destroyed");
  }
}
}
/** Shuts down the ack reader thread, if one was ever started. */
public void stopAckReaderThread() {
  if (ackReaderThread != null) {
    ackReaderThread.shutdown();
  }
}
/** Always {@code true}: this dispatcher sends batches to a remote WAN site. */
@Override
public boolean isRemoteDispatcher() {
  return true;
}
/**
 * @return {@code true} when a live (non-destroyed) connection to the remote site exists
 */
@Override
public boolean isConnectedToRemote() {
  // Snapshot the volatile field once: reading it twice (null-check, then isDestroyed())
  // could NPE if destroyConnection() nulls the field between the two reads.
  Connection con = connection;
  return con != null && !con.isDestroyed();
}
/**
 * Interface hook: unblocks any pending ack read on the current connection, then shuts
 * the ack reader thread down completely.
 */
@Override
public void shutDownAckReaderConnection() {
  if (ackReaderThread != null) {
    ackReaderThread.shutDownAckReaderConnection(connection);
    ackReaderThread.shutdown();
  }
}
/**
 * Stops the ack reader and, once the processor itself is stopped, tears down the
 * connection.
 */
@Override
public void stop() {
  stopAckReaderThread();
  if (processor.isStopped()) {
    destroyConnection();
  }
}
/** Pauses {@value #RETRY_WAIT_TIME} ms before a retry, preserving interrupt status. */
private void sleepBeforeRetry() {
  try {
    Thread.sleep(RETRY_WAIT_TIME);
  } catch (InterruptedException interrupt) {
    // Re-assert the interrupt so callers further up can observe it.
    Thread.currentThread().interrupt();
  }
}
/**
 * Marks the processor stopped, logging the triggering exception — at debug level for an
 * orderly cancellation, fatal otherwise.
 */
private void logAndStopProcessor(final Exception ex) {
  if (!(ex instanceof CancelException)) {
    logger.fatal(
        "Stopping the processor because the following exception occurred while processing a batch:",
        ex);
  } else if (logger.isDebugEnabled()) {
    logger
        .debug("Stopping the processor because cancellation occurred while processing a batch");
  }
  processor.setIsStopped(true);
}
/**
 * Classifies exceptions raised while dispatching batches or reading acknowledgements as
 * recoverable (worth retrying) or fatal (stop the processor).
 */
private static class RecoverableExceptionPredicates {

  /**
   * An ack-read failure is recoverable unless the connectivity problem was caused by a
   * {@link PdxRegistryMismatchException} — that happens when the PDX registry files were
   * deleted on the sending side of the WAN gateway, and no amount of retrying can repair it.
   */
  static boolean isRecoverableWhenReadingAck(final Exception ex) {
    if (isRecoverableInAllCases(ex)) {
      return true;
    }
    if (!(ex instanceof ServerConnectivityException)) {
      return false;
    }
    return !(ex.getCause() instanceof PdxRegistryMismatchException);
  }

  /**
   * A dispatch failure is recoverable for {@link ServerConnectivityException} (treated as a
   * temporary connectivity issue) and for {@link IllegalStateException}, which can occur
   * with off-heap storage when a GatewaySenderEventImpl is serialized after being freed —
   * e.g. the region was destroyed concurrently while the event was being processed.
   */
  static boolean isRecoverableWhenDispatchingBatch(final Throwable t) {
    if (isRecoverableInAllCases(t)) {
      return true;
    }
    return t instanceof ServerConnectivityException || t instanceof IllegalStateException;
  }

  /**
   * Exception types recoverable on both the dispatch and ack paths.
   *
   * <p>{@link IOException} and {@link ConnectionDestroyedException} indicate temporary
   * network trouble. {@link GemFireSecurityException} (an inability to authenticate with
   * the gateway receiver) is also retried, on the expectation that either connectivity to
   * the actual authentication authority (e.g. Active Directory) will be restored, or that
   * currently-invalid credentials will later become valid.
   */
  private static boolean isRecoverableInAllCases(final Throwable t) {
    return t instanceof IOException
        || t instanceof ConnectionDestroyedException
        || t instanceof GemFireSecurityException;
  }
}
/**
 * Synchronously sends one batch on the given connection and waits for its ack,
 * rethrowing any batch-level exception the receiver reported.
 *
 * @throws BatchException70 if the receiver reported a failure, or if no ack was returned
 */
@Override
public void sendBatch(List<GatewayQueueEvent<?, ?>> events, Connection connection,
    ExecutablePool senderPool, int batchId, boolean removeFromQueueOnException)
    throws BatchException70 {
  // First call writes the batch; second call reads the corresponding ack.
  GatewaySenderBatchOp.executeOn(connection, senderPool, events, batchId,
      removeFromQueueOnException, false);
  GatewaySenderEventRemoteDispatcher.GatewayAck ack =
      (GatewaySenderEventRemoteDispatcher.GatewayAck) GatewaySenderBatchOp.executeOn(connection,
          senderPool);
  if (ack == null) {
    throw new BatchException70("Unknown error sending batch", null, 0, batchId);
  }
  if (ack.getBatchException() != null) {
    throw ack.getBatchException();
  }
}
}
|
apache/rya | 37,265 | extras/indexing/src/test/java/org/apache/rya/indexing/mongo/MongoTemporalIndexerIT.java | package org.apache.rya.indexing.mongo;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
import static org.apache.rya.api.resolver.RdfToRyaConversions.convertStatement;
import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertFalse;
import static org.junit.Assert.assertTrue;
import java.io.IOException;
import java.security.NoSuchAlgorithmException;
import java.util.Arrays;
import java.util.HashSet;
import org.apache.accumulo.core.client.TableExistsException;
import org.apache.accumulo.core.client.TableNotFoundException;
import org.apache.rya.indexing.StatementConstraints;
import org.apache.rya.indexing.TemporalInstant;
import org.apache.rya.indexing.TemporalInstantRfc3339;
import org.apache.rya.indexing.TemporalInterval;
import org.apache.rya.indexing.accumulo.ConfigUtils;
import org.apache.rya.indexing.mongodb.temporal.MongoTemporalIndexer;
import org.apache.rya.mongodb.MongoDBRdfConfiguration;
import org.apache.rya.mongodb.MongoRyaITBase;
import org.bson.Document;
import org.eclipse.rdf4j.common.iteration.CloseableIteration;
import org.eclipse.rdf4j.model.IRI;
import org.eclipse.rdf4j.model.Statement;
import org.eclipse.rdf4j.model.ValueFactory;
import org.eclipse.rdf4j.model.impl.SimpleValueFactory;
import org.eclipse.rdf4j.model.vocabulary.RDFS;
import org.eclipse.rdf4j.query.QueryEvaluationException;
import org.junit.Test;
import com.mongodb.MongoException;
import com.mongodb.MongoSecurityException;
import com.mongodb.client.MongoCollection;
import com.mongodb.client.MongoCursor;
import com.mongodb.client.MongoDatabase;
/**
 * JUnit tests for TemporalIndexer and its implementation MongoTemporalIndexer.
*
* If you enjoy this test, please read RyaTemporalIndexerTest and YagoKBTest, which contain
* many example SPARQL queries and updates and attempts to test independently of Mongo:
*
* extras/indexingSail/src/test/java/org.apache/rya/indexing/Mongo/RyaTemporalIndexerTest.java
* {@link org.apache.rya.indexing.Mongo.RyaTemporalIndexerTest}
* {@link org.apache.rya.indexing.Mongo.YagoKBTest.java}
*
 * Remember, this class is instantiated fresh for each @Test method,
 * so fields are reset, unless they are static.
*
* These are covered:
* Instance {before, equals, after} given Instance
* Instance {before, after, inside} given Interval
* Instance {hasBeginning, hasEnd} given Interval
* And a few more.
*
*/
public final class MongoTemporalIndexerIT extends MongoRyaITBase {

    // Predicate URIs registered as temporal predicates in updateConfiguration().
    private static final String URI_PROPERTY_EVENT_TIME = "Property:event:time";
    private static final String URI_PROPERTY_CIRCA = "Property:circa";
    private static final String URI_PROPERTY_AT_TIME = "Property:atTime";

    // Constraints object with no subject/predicate/context restrictions.
    private static final StatementConstraints EMPTY_CONSTRAINTS = new StatementConstraints();

    // Assign this in setUpBeforeClass, store them in each test.
    // setup() deletes table before each test.
    static final Statement spo_B00_E01;
    static final Statement spo_B03_E20;
    static final Statement spo_B02_E29;
    static final Statement spo_B02_E30;
    static final Statement spo_B02_E40;
    static final Statement spo_B02_E31;
    static final Statement spo_B29_E30;
    static final Statement spo_B30_E32;

    // Instants:
    static final Statement spo_B02;
    static final int SERIES_OF_SECONDS = 41;
    static final Statement seriesSpo[] = new Statement[SERIES_OF_SECONDS];

    // These are shared for several tests. Only the seconds are different.
    // tvB03_E20 read as: interval Begins 3 seconds, ends at 20 seconds
    static final TemporalInterval tvB00_E01 = new TemporalInterval(makeInstant(00), makeInstant(01));
    static final TemporalInterval tvB29_E30= new TemporalInterval(makeInstant(29), makeInstant(30));
    static final TemporalInterval tvB30_E32= new TemporalInterval(makeInstant(30), makeInstant(32));
    static final TemporalInterval tvB03_E20 = new TemporalInterval(makeInstant(03), makeInstant(20));
    // 30 seconds, Begins earlier, ends later
    static final TemporalInterval tvB02_E30= new TemporalInterval(makeInstant(02), makeInstant(30));
    // use for interval after
    static final TemporalInterval tvB02_E29= new TemporalInterval(makeInstant(02), makeInstant(29));
    // same as above, but ends in the middle
    static final TemporalInterval tvB02_E31 = new TemporalInterval(makeInstant(02), makeInstant(31));
    // same as above, but ends even later
    static final TemporalInterval tvB02_E40 = new TemporalInterval(makeInstant(02), makeInstant(40));
    // instant, match beginnings of several above, before tiB03_E20
    static final TemporalInstant tsB02 = makeInstant(02);
    // instant, after all above
    static final TemporalInstant tsB04 = makeInstant(04);
    // Create a series of instants about times 0 - 40 seconds
    static final TemporalInstant seriesTs[];
    static {
        seriesTs = new TemporalInstant[SERIES_OF_SECONDS];
        for (int i = 0; i <= 40; i++) {
            seriesTs[i] = makeInstant(i);
        }
    };

    /**
     * Make an uniform instant with given seconds.
     * Every other field (2015-12-30T12:00) is fixed, so only the seconds vary
     * between the instants used by these tests.
     */
    static TemporalInstant makeInstant(final int secondsMakeMeUnique) {
        return new TemporalInstantRfc3339(2015, 12, 30, 12, 00, secondsMakeMeUnique);
    }

    static {
        // Setup the statements only once. Each test will store some of these in their own index table.
        final ValueFactory vf = SimpleValueFactory.getInstance();
        final IRI pred1_atTime = vf.createIRI(URI_PROPERTY_AT_TIME);
        // tiB03_E20 read as: time interval that Begins 3 seconds, ends at 20 seconds,
        // Each time element the same, except seconds. year, month, .... minute are the same for each statement below.
        spo_B00_E01 = vf.createStatement(vf.createIRI("foo:event0"), pred1_atTime, vf.createLiteral(tvB00_E01.toString()));
        spo_B02_E29 = vf.createStatement(vf.createIRI("foo:event2"), pred1_atTime, vf.createLiteral(tvB02_E29.toString()));
        spo_B02_E30 = vf.createStatement(vf.createIRI("foo:event2"), pred1_atTime, vf.createLiteral(tvB02_E30.toString()));
        spo_B02_E31 = vf.createStatement(vf.createIRI("foo:event3"), pred1_atTime, vf.createLiteral(tvB02_E31.toString()));
        spo_B02_E40 = vf.createStatement(vf.createIRI("foo:event4"), pred1_atTime, vf.createLiteral(tvB02_E40.toString()));
        spo_B03_E20 = vf.createStatement(vf.createIRI("foo:event5"), pred1_atTime, vf.createLiteral(tvB03_E20.toString()));
        spo_B29_E30 = vf.createStatement(vf.createIRI("foo:event1"), pred1_atTime, vf.createLiteral(tvB29_E30.toString()));
        spo_B30_E32 = vf.createStatement(vf.createIRI("foo:event1"), pred1_atTime, vf.createLiteral(tvB30_E32.toString()));
        spo_B02 = vf.createStatement(vf.createIRI("foo:event6"), pred1_atTime, vf.createLiteral(tsB02.getAsReadable()));
        // Create statements about time instants 0 - 40 seconds
        for (int i = 0; i < seriesTs.length; i++) {
            seriesSpo[i] = vf.createStatement(vf.createIRI("foo:event0" + i), pred1_atTime, vf.createLiteral(seriesTs[i].getAsReadable()));
        }
    }

    /**
     * Registers the three temporal predicates used by these tests so the
     * indexer will index statements that use them.
     */
    @Override
    protected void updateConfiguration(final MongoDBRdfConfiguration conf) {
        // This is from http://linkedevents.org/ontology
        // and http://motools.sourceforge.net/event/event.html
        conf.setStrings(ConfigUtils.TEMPORAL_PREDICATES_LIST, ""
                + URI_PROPERTY_AT_TIME + ","
                + URI_PROPERTY_CIRCA + ","
                + URI_PROPERTY_EVENT_TIME);
    }

    /**
     * Test method for {@link MongoTemporalIndexer#storeStatement(convertStatement(org.eclipse.rdf4j.model.Statement)}
     * Only statements with a registered temporal predicate and a literal,
     * parseable date object should end up in the index.
     */
    @Test
    public void testStoreStatement() throws IOException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            final ValueFactory vf = SimpleValueFactory.getInstance();
            final IRI pred1_atTime = vf.createIRI(URI_PROPERTY_AT_TIME);
            final IRI pred2_circa = vf.createIRI(URI_PROPERTY_CIRCA);

            // Should not be stored because they are not in the predicate list
            final String validDateStringWithThirteens = "1313-12-13T13:13:13Z";
            tIndexer.storeStatement(convertStatement(vf.createStatement(vf.createIRI("foo:subj1"), RDFS.LABEL, vf.createLiteral(validDateStringWithThirteens))));

            // Should not be stored because the object is not a parseable date.
            final String invalidDateString = "ThisIsAnInvalidDate";
            tIndexer.storeStatement(convertStatement(vf.createStatement(vf.createIRI("foo:subj2"), pred1_atTime, vf.createLiteral(invalidDateString))));

            // These are different datetimes instant but from different time zones.
            // This is an arbitrary zone, BRST=Brazil, better if not local.
            // same as "2015-01-01T01:59:59Z"
            final String testDate2014InBRST = "2014-12-31T23:59:59-02:00";
            // next year, same as "2017-01-01T01:59:59Z"
            final String testDate2016InET = "2016-12-31T20:59:59-05:00";

            // These should be stored because they are in the predicate list.
            // BUT they will get converted to the same exact datetime in UTC.
            final Statement s3 = vf.createStatement(vf.createIRI("foo:subj3"), pred1_atTime, vf.createLiteral(testDate2014InBRST));
            final Statement s4 = vf.createStatement(vf.createIRI("foo:subj4"), pred2_circa, vf.createLiteral(testDate2016InET));
            tIndexer.storeStatement(convertStatement(s3));
            tIndexer.storeStatement(convertStatement(s4));

            // This should not be stored because the object is not a literal
            tIndexer.storeStatement(convertStatement(vf.createStatement(vf.createIRI("foo:subj5"), pred1_atTime, vf.createIRI("in:valid"))));

            printTables(tIndexer, "junit testing: Temporal entities stored in testStoreStatement");
            // Only s3 and s4 qualified for indexing.
            assertEquals(2, tIndexer.getCollection().countDocuments());
        }
    }

    /**
     * Store two indexable statements, delete them, and verify that the
     * backing Mongo collection is emptied again.
     */
    @Test
    public void testDelete() throws IOException, MongoException, TableNotFoundException, TableExistsException, NoSuchAlgorithmException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            final ValueFactory vf = SimpleValueFactory.getInstance();
            final IRI pred1_atTime = vf.createIRI(URI_PROPERTY_AT_TIME);
            final IRI pred2_circa = vf.createIRI(URI_PROPERTY_CIRCA);
            final String testDate2014InBRST = "2014-12-31T23:59:59-02:00";
            final String testDate2016InET = "2016-12-31T20:59:59-05:00";

            // These should be stored because they are in the predicate list.
            // BUT they will get converted to the same exact datetime in UTC.
            final Statement s1 = vf.createStatement(vf.createIRI("foo:subj3"), pred1_atTime, vf.createLiteral(testDate2014InBRST));
            final Statement s2 = vf.createStatement(vf.createIRI("foo:subj4"), pred2_circa, vf.createLiteral(testDate2016InET));
            tIndexer.storeStatement(convertStatement(s1));
            tIndexer.storeStatement(convertStatement(s2));

            // Look up the index collection directly so the count is checked
            // against Mongo itself, not through the indexer under test.
            final String dbName = conf.getMongoDBName();
            final MongoDatabase db = super.getMongoClient().getDatabase(dbName);
            final MongoCollection<Document> collection = db.getCollection(conf.get(MongoDBRdfConfiguration.MONGO_COLLECTION_PREFIX, "rya") + tIndexer.getCollectionName());

            printTables(tIndexer, "junit testing: Temporal entities stored in testDelete before delete");
            assertEquals("Number of rows stored.", 2, collection.countDocuments()); // 4 index entries per statement

            tIndexer.deleteStatement(convertStatement(s1));
            tIndexer.deleteStatement(convertStatement(s2));
            printTables(tIndexer, "junit testing: Temporal entities stored in testDelete after delete");
            assertEquals("Number of rows stored after delete.", 0, collection.countDocuments());
        }
    }

    /**
     * Test instant after a given instant.
     * From the series: instant {equal, before, after} instant
     * @throws MongoSecurityException
     * @throws MongoException
     * @throws TableNotFoundException
     */
    @Test
    public void testQueryInstantAfterInstant() throws IOException, QueryEvaluationException, TableNotFoundException, MongoException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds
            // these should not match as they are not instants.
            tIndexer.storeStatement(convertStatement(spo_B03_E20));
            tIndexer.storeStatement(convertStatement(spo_B02_E30));
            tIndexer.storeStatement(convertStatement(spo_B02_E40));
            tIndexer.storeStatement(convertStatement(spo_B02_E31));
            tIndexer.storeStatement(convertStatement(spo_B30_E32));

            // seriesSpo[s] and seriesTs[s] are statements and instant for s seconds after the uniform time.
            final int searchForSeconds = 4;
            final int expectedResultCount = 9;
            for (int s = 0; s <= searchForSeconds + expectedResultCount; s++) { // <== logic here
                tIndexer.storeStatement(convertStatement(seriesSpo[s]));
            }
            CloseableIteration<Statement, QueryEvaluationException> iter;
            iter = tIndexer.queryInstantAfterInstant(seriesTs[searchForSeconds], EMPTY_CONSTRAINTS);
            int count = 0;
            while (iter.hasNext()) {
                final Statement s = iter.next();
                // Results are expected in ascending time order, starting one
                // second after the search instant.
                final Statement nextExpectedStatement = seriesSpo[searchForSeconds + count + 1]; // <== logic here
                assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s));
                count++;
            }
            assertEquals("Should find count of rows.", expectedResultCount, count);
        }
    }

    /**
     * Test instant before a given instant.
     * From the series: instant {equal, before, after} instant
     */
    @Test
    public void testQueryInstantBeforeInstant() throws IOException, QueryEvaluationException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds
            // these should not match as they are not instants.
            tIndexer.storeStatement(convertStatement(spo_B03_E20));
            tIndexer.storeStatement(convertStatement(spo_B02_E30));
            tIndexer.storeStatement(convertStatement(spo_B02_E40));
            tIndexer.storeStatement(convertStatement(spo_B02_E31));
            tIndexer.storeStatement(convertStatement(spo_B30_E32));

            // seriesSpo[s] and seriesTs[s] are statements and instant for s seconds after the uniform time.
            final int searchForSeconds = 4;
            final int expectedResultCount = 4;
            for (int s = 0; s <= searchForSeconds + 15; s++) { // <== logic here
                tIndexer.storeStatement(convertStatement(seriesSpo[s]));
            }
            CloseableIteration<Statement, QueryEvaluationException> iter;
            iter = tIndexer.queryInstantBeforeInstant(seriesTs[searchForSeconds], EMPTY_CONSTRAINTS);
            int count = 0;
            while (iter.hasNext()) {
                final Statement s = iter.next();
                // Expect instants 0..3, in ascending order.
                final Statement nextExpectedStatement = seriesSpo[count]; // <== logic here
                assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s));
                count++;
            }
            assertEquals("Should find count of rows.", expectedResultCount, count);
        }
    }

    /**
     * Test instant before given interval.
     * From the series: Instance {before, after, inside} given Interval
     */
    @Test
    public void testQueryInstantBeforeInterval() throws IOException, QueryEvaluationException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds
            // these should not match as they are not instants.
            tIndexer.storeStatement(convertStatement(spo_B03_E20));
            tIndexer.storeStatement(convertStatement(spo_B02_E30));
            tIndexer.storeStatement(convertStatement(spo_B02_E40));
            tIndexer.storeStatement(convertStatement(spo_B02_E31));
            tIndexer.storeStatement(convertStatement(spo_B30_E32));

            // seriesSpo[s] and seriesTs[s] are statements and instants for s seconds after the uniform time.
            final TemporalInterval searchForSeconds = tvB02_E31;
            final int expectedResultCount = 2; // 00 and 01 seconds.
            for (int s = 0; s <= 40; s++) { // <== logic here
                tIndexer.storeStatement(convertStatement(seriesSpo[s]));
            }
            CloseableIteration<Statement, QueryEvaluationException> iter;
            iter = tIndexer.queryInstantBeforeInterval(searchForSeconds, EMPTY_CONSTRAINTS);
            int count = 0;
            while (iter.hasNext()) {
                final Statement s = iter.next();
                // Only instants strictly before the interval's beginning match.
                final Statement nextExpectedStatement = seriesSpo[count]; // <== logic here
                assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s));
                count++;
            }
            assertEquals("Should find count of rows.", expectedResultCount, count);
        }
    }

    /**
     * Test instant after given interval.
     * Instance {before, after, inside} given Interval
     */
    @Test
    public void testQueryInstantAfterInterval() throws IOException, QueryEvaluationException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds
            // these should not match as they are not instants.
            tIndexer.storeStatement(convertStatement(spo_B03_E20));
            tIndexer.storeStatement(convertStatement(spo_B02_E30));
            tIndexer.storeStatement(convertStatement(spo_B02_E40));
            tIndexer.storeStatement(convertStatement(spo_B02_E31));
            tIndexer.storeStatement(convertStatement(spo_B30_E32));

            // seriesSpo[s] and seriesTs[s] are statements and instants for s seconds after the uniform time.
            final TemporalInterval searchAfterInterval = tvB02_E31; // from 2 to 31 seconds
            final int endingSeconds = 31;
            final int expectedResultCount = 9; // 32,33,...,40 seconds.
            for (int s = 0; s <= endingSeconds + expectedResultCount; s++) { // <== logic here
                tIndexer.storeStatement(convertStatement(seriesSpo[s]));
            }
            CloseableIteration<Statement, QueryEvaluationException> iter;
            iter = tIndexer.queryInstantAfterInterval(searchAfterInterval, EMPTY_CONSTRAINTS);
            int count = 0;
            while (iter.hasNext()) {
                final Statement s = iter.next();
                // Only instants strictly after the interval's end match.
                final Statement nextExpectedStatement = seriesSpo[count + endingSeconds + 1]; // <== logic here
                assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s));
                count++;
            }
            assertEquals("Should find count of rows.", expectedResultCount, count);
        }
    }

    /**
     * Test instant inside given interval.
     * Instance {before, after, inside} given Interval
     */
    @Test
    public void testQueryInstantInsideInterval() throws IOException, QueryEvaluationException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds
            // these should not match as they are not instants.
            tIndexer.storeStatement(convertStatement(spo_B03_E20));
            tIndexer.storeStatement(convertStatement(spo_B02_E30));
            tIndexer.storeStatement(convertStatement(spo_B02_E40));
            tIndexer.storeStatement(convertStatement(spo_B02_E31));
            tIndexer.storeStatement(convertStatement(spo_B30_E32));

            // seriesSpo[s] and seriesTs[s] are statements and instants for s seconds after the uniform time.
            final TemporalInterval searchInsideInterval = tvB02_E31; // from 2 to 31 seconds
            final int beginningSeconds = 2; // <== logic here, and next few lines.
            final int endingSeconds = 31;
            // Endpoints are excluded, so matches are 3,4,...,30 seconds.
            final int expectedResultCount = endingSeconds - beginningSeconds - 1; // 3,4,...,30 seconds.
            for (int s = 0; s <= 40; s++) {
                tIndexer.storeStatement(convertStatement(seriesSpo[s]));
            }
            CloseableIteration<Statement, QueryEvaluationException> iter;
            iter = tIndexer.queryInstantInsideInterval(searchInsideInterval, EMPTY_CONSTRAINTS);
            int count = 0;
            while (iter.hasNext()) {
                final Statement s = iter.next();
                final Statement nextExpectedStatement = seriesSpo[count + beginningSeconds + 1]; // <== logic here
                assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s));
                count++;
            }
            assertEquals("Should find count of rows.", expectedResultCount, count);
        }
    }

    /**
     * Test instant is the Beginning of the given interval.
     * from the series: Instance {hasBeginning, hasEnd} Interval
     */
    @Test
    public void testQueryInstantHasBeginningInterval() throws IOException, QueryEvaluationException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds
            // these should not match as they are not instants.
            tIndexer.storeStatement(convertStatement(spo_B03_E20));
            tIndexer.storeStatement(convertStatement(spo_B02_E30));
            tIndexer.storeStatement(convertStatement(spo_B02_E40));
            tIndexer.storeStatement(convertStatement(spo_B02_E31));
            tIndexer.storeStatement(convertStatement(spo_B30_E32));

            // seriesSpo[s] and seriesTs[s] are statements and instants for s seconds after the uniform time.
            final TemporalInterval searchInsideInterval = tvB02_E31; // from 2 to 31 seconds
            final int searchSeconds = 2; // <== logic here, and next few lines.
            final int expectedResultCount = 1; // 2 seconds.
            for (int s = 0; s <= 10; s++) {
                tIndexer.storeStatement(convertStatement(seriesSpo[s]));
            }
            CloseableIteration<Statement, QueryEvaluationException> iter;
            iter = tIndexer.queryInstantHasBeginningInterval(searchInsideInterval, EMPTY_CONSTRAINTS);
            int count = 0;
            while (iter.hasNext()) {
                final Statement s = iter.next();
                // Only the instant equal to the interval's beginning matches.
                final Statement nextExpectedStatement = seriesSpo[searchSeconds]; // <== logic here
                assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s));
                count++;
            }
            assertEquals("Should find count of rows.", expectedResultCount, count);
        }
    }

    /**
     * Test instant is the end of the given interval.
     * from the series: Instance {hasBeginning, hasEnd} Interval
     */
    @Test
    public void testQueryInstantHasEndInterval() throws IOException, QueryEvaluationException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds
            // these should not match as they are not instants.
            tIndexer.storeStatement(convertStatement(spo_B03_E20));
            tIndexer.storeStatement(convertStatement(spo_B02_E30));
            tIndexer.storeStatement(convertStatement(spo_B02_E40));
            tIndexer.storeStatement(convertStatement(spo_B02_E31));
            tIndexer.storeStatement(convertStatement(spo_B30_E32));

            // seriesSpo[s] and seriesTs[s] are statements and instants for s seconds after the uniform time.
            final TemporalInterval searchInsideInterval = tvB02_E31; // from 2 to 31 seconds
            final int searchSeconds = 31; // <== logic here, and next few lines.
            final int expectedResultCount = 1; // 31 seconds.
            for (int s = 0; s <= 40; s++) {
                tIndexer.storeStatement(convertStatement(seriesSpo[s]));
            }
            CloseableIteration<Statement, QueryEvaluationException> iter;
            iter = tIndexer.queryInstantHasEndInterval(searchInsideInterval, EMPTY_CONSTRAINTS);
            int count = 0;
            while (iter.hasNext()) {
                final Statement s = iter.next();
                // Only the instant equal to the interval's end matches.
                final Statement nextExpectedStatement = seriesSpo[searchSeconds]; // <== logic here
                assertTrue("Should match: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s));
                count++;
            }
            assertEquals("Should find count of rows.", expectedResultCount, count);
        }
    }

    /**
     * Test method for
     * {@link org.apache.rya.indexing.Mongo.temporal.MongoTemporalIndexer#queryIntervalEquals(TemporalInterval, StatementConstraints)}
     * .
     * @throws IOException
     * @throws QueryEvaluationException
     *
     */
    @Test
    public void testQueryIntervalEquals() throws IOException, QueryEvaluationException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds
            tIndexer.storeStatement(convertStatement(spo_B03_E20));
            tIndexer.storeStatement(convertStatement(spo_B02_E30));
            tIndexer.storeStatement(convertStatement(spo_B02_E40));
            tIndexer.storeStatement(convertStatement(spo_B02_E31));
            tIndexer.storeStatement(convertStatement(spo_B30_E32));
            tIndexer.storeStatement(convertStatement(seriesSpo[4])); // instance at 4 seconds

            CloseableIteration<Statement, QueryEvaluationException> iter;
            iter = tIndexer.queryIntervalEquals(tvB02_E40, EMPTY_CONSTRAINTS);
            // Exactly one stored interval equals tvB02_E40.
            assertTrue("queryIntervalEquals: spo_B02_E40 should be found, but actually returned empty results. spo_B02_E40=" + spo_B02_E40, iter.hasNext());
            assertTrue("queryIntervalEquals: spo_B02_E40 should be found, but does not match.", spo_B02_E40.equals(iter.next()));
            assertFalse("queryIntervalEquals: Find no more than one, but actually has more.", iter.hasNext());
        }
    }

    /**
     * Test interval before a given interval, for method:
     * {@link MongoTemporalIndexer#queryIntervalBefore(TemporalInterval, StatementConstraints)}.
     *
     * @throws IOException
     * @throws QueryEvaluationException
     */
    @Test
    public void testQueryIntervalBefore() throws IOException, QueryEvaluationException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds
            tIndexer.storeStatement(convertStatement(spo_B00_E01));
            tIndexer.storeStatement(convertStatement(spo_B02_E30));
            tIndexer.storeStatement(convertStatement(spo_B02_E31));
            tIndexer.storeStatement(convertStatement(spo_B02_E40));
            tIndexer.storeStatement(convertStatement(spo_B03_E20));
            // instants should be ignored.
            tIndexer.storeStatement(convertStatement(spo_B30_E32));
            tIndexer.storeStatement(convertStatement(seriesSpo[1])); // instance at 1 seconds
            tIndexer.storeStatement(convertStatement(seriesSpo[2]));
            tIndexer.storeStatement(convertStatement(seriesSpo[31]));

            CloseableIteration<Statement, QueryEvaluationException> iter;
            iter = tIndexer.queryIntervalBefore(tvB02_E31, EMPTY_CONSTRAINTS);
            // Only spo_B00_E01 ends before tvB02_E31 begins.
            assertTrue("spo_B00_E01 should be found, but actually returned empty results. spo_B00_E01=" + spo_B00_E01, iter.hasNext());
            assertTrue("spo_B00_E01 should be found, but found another.", spo_B00_E01.equals(iter.next()));
            assertFalse("Find no more than one, but actually has more.", iter.hasNext());
        }
    }

    /**
     * interval is after the given interval. Find interval beginnings after the endings of the given interval.
     * {@link MongoTemporalIndexer#queryIntervalAfter(TemporalInterval, StatementContraints).
     *
     * @throws IOException
     * @throws QueryEvaluationException
     */
    @Test
    public void testQueryIntervalAfter() throws IOException, QueryEvaluationException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds
            tIndexer.storeStatement(convertStatement(spo_B00_E01));
            tIndexer.storeStatement(convertStatement(spo_B02_E29)); //<- after this one.
            tIndexer.storeStatement(convertStatement(spo_B02_E30));
            tIndexer.storeStatement(convertStatement(spo_B02_E31));
            tIndexer.storeStatement(convertStatement(spo_B02_E40));
            tIndexer.storeStatement(convertStatement(spo_B03_E20));
            tIndexer.storeStatement(convertStatement(spo_B29_E30));
            tIndexer.storeStatement(convertStatement(spo_B30_E32));
            // instants should be ignored.
            tIndexer.storeStatement(convertStatement(spo_B02));
            tIndexer.storeStatement(convertStatement(seriesSpo[1])); // instance at 1 seconds
            tIndexer.storeStatement(convertStatement(seriesSpo[2]));
            tIndexer.storeStatement(convertStatement(seriesSpo[31]));

            CloseableIteration<Statement, QueryEvaluationException> iter;
            iter = tIndexer.queryIntervalAfter(tvB02_E29, EMPTY_CONSTRAINTS);
            // Only spo_B30_E32 begins after tvB02_E29 ends.
            assertTrue("spo_B30_E32 should be found, but actually returned empty results. spo_B30_E32=" + spo_B30_E32, iter.hasNext());
            final Statement s = iter.next();
            assertTrue("spo_B30_E32 should be found, but found another. spo_B30_E32="+spo_B30_E32+", but found="+s, spo_B30_E32.equals(s));
            assertFalse("Find no more than one, but actually has more.", iter.hasNext());
        }
    }

    /**
     * Test instant after a given instant WITH two different predicates as constraints.
     */
    @Test
    public void testQueryWithMultiplePredicates() throws IOException, QueryEvaluationException {
        try(MongoTemporalIndexer tIndexer = new MongoTemporalIndexer()) {
            tIndexer.setConf(conf);
            tIndexer.init();

            // tiB02_E30 read as: Begins 2 seconds, ends at 30 seconds
            // these should not match as they are not instants.
            tIndexer.storeStatement(convertStatement(spo_B03_E20));
            tIndexer.storeStatement(convertStatement(spo_B02_E30));
            tIndexer.storeStatement(convertStatement(spo_B02_E40));
            tIndexer.storeStatement(convertStatement(spo_B02_E31));
            tIndexer.storeStatement(convertStatement(spo_B30_E32));

            // seriesSpo[s] and seriesTs[s] are statements and instant for s seconds after the uniform time.
            final int searchForSeconds = 4;
            final int expectedResultCount = 9;
            for (int s = 0; s <= searchForSeconds + expectedResultCount; s++) { // <== logic here
                tIndexer.storeStatement(convertStatement(seriesSpo[s]));
            }
            final ValueFactory vf = SimpleValueFactory.getInstance();
            final IRI pred3_CIRCA_ = vf.createIRI(URI_PROPERTY_CIRCA); // this one to ignore.
            final IRI pred2_eventTime = vf.createIRI(URI_PROPERTY_EVENT_TIME);
            final IRI pred1_atTime = vf.createIRI(URI_PROPERTY_AT_TIME);

            // add the predicate = EventTime ; Store in an array for verification.
            final Statement[] SeriesTs_EventTime = new Statement[expectedResultCount+1];
            for (int s = 0; s <= searchForSeconds + expectedResultCount; s++) { // <== logic here
                final Statement statement = vf.createStatement(vf.createIRI("foo:EventTimeSubj0" + s), pred2_eventTime, vf.createLiteral(seriesTs[s].getAsReadable()));
                tIndexer.storeStatement(convertStatement(statement));
                if (s>searchForSeconds) {
                    SeriesTs_EventTime[s - searchForSeconds -1 ] = statement;
                }
            }
            // add the predicate = CIRCA ; to be ignored because it is not in the constraints.
            for (int s = 0; s <= searchForSeconds + expectedResultCount; s++) { // <== logic here
                final Statement statement = vf.createStatement(vf.createIRI("foo:CircaEventSubj0" + s), pred3_CIRCA_, vf.createLiteral(seriesTs[s].getAsReadable()));
                tIndexer.storeStatement(convertStatement(statement));
            }

            CloseableIteration<Statement, QueryEvaluationException> iter;
            final StatementConstraints constraints = new StatementConstraints();
            constraints.setPredicates(new HashSet<>(Arrays.asList( pred2_eventTime, pred1_atTime )));
            iter = tIndexer.queryInstantAfterInstant(seriesTs[searchForSeconds], constraints); // EMPTY_CONSTRAINTS);//
            int count_AtTime = 0;
            int count_EventTime = 0;
            while (iter.hasNext()) {
                final Statement s = iter.next();
                final Statement nextExpectedStatement = seriesSpo[searchForSeconds + count_AtTime + 1]; // <== logic here
                if (s.getPredicate().equals(pred1_atTime)) {
                    assertTrue("Should match atTime: " + nextExpectedStatement + " == " + s, nextExpectedStatement.equals(s));
                    count_AtTime++;
                }
                else if (s.getPredicate().equals(pred2_eventTime)) {
                    assertTrue("Should match eventTime: " + SeriesTs_EventTime[count_EventTime] + " == " + s, SeriesTs_EventTime[count_EventTime].equals(s));
                    count_EventTime++;
                } else {
                    // Predicate CIRCA was not in the constraints, so it must not appear.
                    assertTrue("This predicate should not be returned: "+s, false);
                }
            }
            assertEquals("Should find count of atTime rows.", expectedResultCount, count_AtTime);
            assertEquals("Should find count of eventTime rows.", expectedResultCount, count_EventTime);
        }
    }

    /**
     * Print the entire temporal index collection to System.out, one document
     * per line, prefixed by a description line identifying the calling test.
     *
     * @param tIndexer the indexer whose backing Mongo collection is listed.
     * @param description printed to the console to find the test case.
     * @throws IOException
     */
    public void printTables(final MongoTemporalIndexer tIndexer, final String description) throws IOException {
        System.out.println("-- start printTables() -- " + description);
        System.out.println("Reading : " + tIndexer.getCollection().getNamespace().getFullName());
        try (final MongoCursor<Document> cursor = tIndexer.getCollection().find().iterator()) {
            while(cursor.hasNext()) {
                final Document doc = cursor.next();
                System.out.println(doc.toString());
            }
        }
        System.out.println();
    }
}
|
googleapis/google-cloud-java | 36,787 | java-maps-fleetengine/proto-google-maps-fleetengine-v1/src/main/java/com/google/maps/fleetengine/v1/TerminalPointId.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/maps/fleetengine/v1/fleetengine.proto
// Protobuf Java Version: 3.25.8
package com.google.maps.fleetengine.v1;
/**
*
*
* <pre>
* Deprecated: TerminalPoints are no longer supported in Fleet Engine. Use
* `TerminalLocation.point` instead.
* </pre>
*
* Protobuf type {@code maps.fleetengine.v1.TerminalPointId}
*/
@java.lang.Deprecated
public final class TerminalPointId extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:maps.fleetengine.v1.TerminalPointId)
TerminalPointIdOrBuilder {
private static final long serialVersionUID = 0L;
// Use TerminalPointId.newBuilder() to construct.
private TerminalPointId(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TerminalPointId() {
value_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new TerminalPointId();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.maps.fleetengine.v1.FleetEngine
.internal_static_maps_fleetengine_v1_TerminalPointId_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.maps.fleetengine.v1.FleetEngine
.internal_static_maps_fleetengine_v1_TerminalPointId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.maps.fleetengine.v1.TerminalPointId.class,
com.google.maps.fleetengine.v1.TerminalPointId.Builder.class);
}
private int idCase_ = 0;
@SuppressWarnings("serial")
private java.lang.Object id_;
public enum IdCase
implements
com.google.protobuf.Internal.EnumLite,
com.google.protobuf.AbstractMessage.InternalOneOfEnum {
@java.lang.Deprecated
PLACE_ID(2),
@java.lang.Deprecated
GENERATED_ID(3),
ID_NOT_SET(0);
private final int value;
private IdCase(int value) {
this.value = value;
}
/**
* @param value The number of the enum to look for.
* @return The enum associated with the given number.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static IdCase valueOf(int value) {
return forNumber(value);
}
public static IdCase forNumber(int value) {
switch (value) {
case 2:
return PLACE_ID;
case 3:
return GENERATED_ID;
case 0:
return ID_NOT_SET;
default:
return null;
}
}
public int getNumber() {
return this.value;
}
};
public IdCase getIdCase() {
return IdCase.forNumber(idCase_);
}
public static final int PLACE_ID_FIELD_NUMBER = 2;
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string place_id = 2 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.place_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=43
* @return Whether the placeId field is set.
*/
@java.lang.Deprecated
public boolean hasPlaceId() {
return idCase_ == 2;
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string place_id = 2 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.place_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=43
* @return The placeId.
*/
@java.lang.Deprecated
public java.lang.String getPlaceId() {
java.lang.Object ref = "";
if (idCase_ == 2) {
ref = id_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (idCase_ == 2) {
id_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string place_id = 2 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.place_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=43
* @return The bytes for placeId.
*/
@java.lang.Deprecated
public com.google.protobuf.ByteString getPlaceIdBytes() {
java.lang.Object ref = "";
if (idCase_ == 2) {
ref = id_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (idCase_ == 2) {
id_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int GENERATED_ID_FIELD_NUMBER = 3;
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string generated_id = 3 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.generated_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=46
* @return Whether the generatedId field is set.
*/
@java.lang.Deprecated
public boolean hasGeneratedId() {
return idCase_ == 3;
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string generated_id = 3 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.generated_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=46
* @return The generatedId.
*/
@java.lang.Deprecated
public java.lang.String getGeneratedId() {
java.lang.Object ref = "";
if (idCase_ == 3) {
ref = id_;
}
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (idCase_ == 3) {
id_ = s;
}
return s;
}
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string generated_id = 3 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.generated_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=46
* @return The bytes for generatedId.
*/
@java.lang.Deprecated
public com.google.protobuf.ByteString getGeneratedIdBytes() {
java.lang.Object ref = "";
if (idCase_ == 3) {
ref = id_;
}
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (idCase_ == 3) {
id_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int VALUE_FIELD_NUMBER = 4;
@SuppressWarnings("serial")
private volatile java.lang.Object value_ = "";
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string value = 4 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.value is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=50
* @return The value.
*/
@java.lang.Override
@java.lang.Deprecated
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
value_ = s;
return s;
}
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string value = 4 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.value is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=50
* @return The bytes for value.
*/
@java.lang.Override
@java.lang.Deprecated
public com.google.protobuf.ByteString getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
value_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (idCase_ == 2) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, id_);
}
if (idCase_ == 3) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, id_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(value_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 4, value_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (idCase_ == 2) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, id_);
}
if (idCase_ == 3) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, id_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(value_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, value_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.maps.fleetengine.v1.TerminalPointId)) {
return super.equals(obj);
}
com.google.maps.fleetengine.v1.TerminalPointId other =
(com.google.maps.fleetengine.v1.TerminalPointId) obj;
if (!getValue().equals(other.getValue())) return false;
if (!getIdCase().equals(other.getIdCase())) return false;
switch (idCase_) {
case 2:
if (!getPlaceId().equals(other.getPlaceId())) return false;
break;
case 3:
if (!getGeneratedId().equals(other.getGeneratedId())) return false;
break;
case 0:
default:
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + VALUE_FIELD_NUMBER;
hash = (53 * hash) + getValue().hashCode();
switch (idCase_) {
case 2:
hash = (37 * hash) + PLACE_ID_FIELD_NUMBER;
hash = (53 * hash) + getPlaceId().hashCode();
break;
case 3:
hash = (37 * hash) + GENERATED_ID_FIELD_NUMBER;
hash = (53 * hash) + getGeneratedId().hashCode();
break;
case 0:
default:
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.maps.fleetengine.v1.TerminalPointId parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.maps.fleetengine.v1.TerminalPointId parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.maps.fleetengine.v1.TerminalPointId parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.maps.fleetengine.v1.TerminalPointId parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.maps.fleetengine.v1.TerminalPointId parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.maps.fleetengine.v1.TerminalPointId parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.maps.fleetengine.v1.TerminalPointId parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.maps.fleetengine.v1.TerminalPointId parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.maps.fleetengine.v1.TerminalPointId parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.maps.fleetengine.v1.TerminalPointId parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.maps.fleetengine.v1.TerminalPointId parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.maps.fleetengine.v1.TerminalPointId parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.maps.fleetengine.v1.TerminalPointId prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Deprecated: TerminalPoints are no longer supported in Fleet Engine. Use
* `TerminalLocation.point` instead.
* </pre>
*
* Protobuf type {@code maps.fleetengine.v1.TerminalPointId}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:maps.fleetengine.v1.TerminalPointId)
com.google.maps.fleetengine.v1.TerminalPointIdOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.maps.fleetengine.v1.FleetEngine
.internal_static_maps_fleetengine_v1_TerminalPointId_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.maps.fleetengine.v1.FleetEngine
.internal_static_maps_fleetengine_v1_TerminalPointId_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.maps.fleetengine.v1.TerminalPointId.class,
com.google.maps.fleetengine.v1.TerminalPointId.Builder.class);
}
// Construct using com.google.maps.fleetengine.v1.TerminalPointId.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
value_ = "";
idCase_ = 0;
id_ = null;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.maps.fleetengine.v1.FleetEngine
.internal_static_maps_fleetengine_v1_TerminalPointId_descriptor;
}
@java.lang.Override
public com.google.maps.fleetengine.v1.TerminalPointId getDefaultInstanceForType() {
return com.google.maps.fleetengine.v1.TerminalPointId.getDefaultInstance();
}
@java.lang.Override
public com.google.maps.fleetengine.v1.TerminalPointId build() {
com.google.maps.fleetengine.v1.TerminalPointId result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.maps.fleetengine.v1.TerminalPointId buildPartial() {
com.google.maps.fleetengine.v1.TerminalPointId result =
new com.google.maps.fleetengine.v1.TerminalPointId(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
buildPartialOneofs(result);
onBuilt();
return result;
}
private void buildPartial0(com.google.maps.fleetengine.v1.TerminalPointId result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.value_ = value_;
}
}
private void buildPartialOneofs(com.google.maps.fleetengine.v1.TerminalPointId result) {
result.idCase_ = idCase_;
result.id_ = this.id_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.maps.fleetengine.v1.TerminalPointId) {
return mergeFrom((com.google.maps.fleetengine.v1.TerminalPointId) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.maps.fleetengine.v1.TerminalPointId other) {
if (other == com.google.maps.fleetengine.v1.TerminalPointId.getDefaultInstance()) return this;
if (!other.getValue().isEmpty()) {
value_ = other.value_;
bitField0_ |= 0x00000004;
onChanged();
}
switch (other.getIdCase()) {
case PLACE_ID:
{
idCase_ = 2;
id_ = other.id_;
onChanged();
break;
}
case GENERATED_ID:
{
idCase_ = 3;
id_ = other.id_;
onChanged();
break;
}
case ID_NOT_SET:
{
break;
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 18:
{
java.lang.String s = input.readStringRequireUtf8();
idCase_ = 2;
id_ = s;
break;
} // case 18
case 26:
{
java.lang.String s = input.readStringRequireUtf8();
idCase_ = 3;
id_ = s;
break;
} // case 26
case 34:
{
value_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int idCase_ = 0;
private java.lang.Object id_;
public IdCase getIdCase() {
return IdCase.forNumber(idCase_);
}
public Builder clearId() {
idCase_ = 0;
id_ = null;
onChanged();
return this;
}
private int bitField0_;
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string place_id = 2 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.place_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=43
* @return Whether the placeId field is set.
*/
@java.lang.Override
@java.lang.Deprecated
public boolean hasPlaceId() {
return idCase_ == 2;
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string place_id = 2 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.place_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=43
* @return The placeId.
*/
@java.lang.Override
@java.lang.Deprecated
public java.lang.String getPlaceId() {
java.lang.Object ref = "";
if (idCase_ == 2) {
ref = id_;
}
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (idCase_ == 2) {
id_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string place_id = 2 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.place_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=43
* @return The bytes for placeId.
*/
@java.lang.Override
@java.lang.Deprecated
public com.google.protobuf.ByteString getPlaceIdBytes() {
java.lang.Object ref = "";
if (idCase_ == 2) {
ref = id_;
}
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (idCase_ == 2) {
id_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string place_id = 2 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.place_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=43
* @param value The placeId to set.
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder setPlaceId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
idCase_ = 2;
id_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string place_id = 2 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.place_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=43
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder clearPlaceId() {
if (idCase_ == 2) {
idCase_ = 0;
id_ = null;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string place_id = 2 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.place_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=43
* @param value The bytes for placeId to set.
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder setPlaceIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
idCase_ = 2;
id_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string generated_id = 3 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.generated_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=46
* @return Whether the generatedId field is set.
*/
@java.lang.Override
@java.lang.Deprecated
public boolean hasGeneratedId() {
return idCase_ == 3;
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string generated_id = 3 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.generated_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=46
* @return The generatedId.
*/
@java.lang.Override
@java.lang.Deprecated
public java.lang.String getGeneratedId() {
java.lang.Object ref = "";
if (idCase_ == 3) {
ref = id_;
}
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
if (idCase_ == 3) {
id_ = s;
}
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string generated_id = 3 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.generated_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=46
* @return The bytes for generatedId.
*/
@java.lang.Override
@java.lang.Deprecated
public com.google.protobuf.ByteString getGeneratedIdBytes() {
java.lang.Object ref = "";
if (idCase_ == 3) {
ref = id_;
}
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
if (idCase_ == 3) {
id_ = b;
}
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string generated_id = 3 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.generated_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=46
* @param value The generatedId to set.
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder setGeneratedId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
idCase_ = 3;
id_ = value;
onChanged();
return this;
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string generated_id = 3 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.generated_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=46
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder clearGeneratedId() {
if (idCase_ == 3) {
idCase_ = 0;
id_ = null;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string generated_id = 3 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.generated_id is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=46
* @param value The bytes for generatedId to set.
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder setGeneratedIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
idCase_ = 3;
id_ = value;
onChanged();
return this;
}
private java.lang.Object value_ = "";
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string value = 4 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.value is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=50
* @return The value.
*/
@java.lang.Deprecated
public java.lang.String getValue() {
java.lang.Object ref = value_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
value_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string value = 4 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.value is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=50
* @return The bytes for value.
*/
@java.lang.Deprecated
public com.google.protobuf.ByteString getValueBytes() {
java.lang.Object ref = value_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
value_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string value = 4 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.value is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=50
* @param value The value to set.
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder setValue(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
value_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string value = 4 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.value is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=50
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder clearValue() {
value_ = getDefaultInstance().getValue();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Deprecated.
* </pre>
*
* <code>string value = 4 [deprecated = true];</code>
*
* @deprecated maps.fleetengine.v1.TerminalPointId.value is deprecated. See
* google/maps/fleetengine/v1/fleetengine.proto;l=50
* @param value The bytes for value to set.
* @return This builder for chaining.
*/
@java.lang.Deprecated
public Builder setValueBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
value_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:maps.fleetengine.v1.TerminalPointId)
}
// @@protoc_insertion_point(class_scope:maps.fleetengine.v1.TerminalPointId)
private static final com.google.maps.fleetengine.v1.TerminalPointId DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.maps.fleetengine.v1.TerminalPointId();
}
public static com.google.maps.fleetengine.v1.TerminalPointId getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<TerminalPointId> PARSER =
new com.google.protobuf.AbstractParser<TerminalPointId>() {
@java.lang.Override
public TerminalPointId parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TerminalPointId> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TerminalPointId> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.maps.fleetengine.v1.TerminalPointId getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,085 | java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/DiscoveryCloudStorageGenerationCadence.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/privacy/dlp/v2/dlp.proto
// Protobuf Java Version: 3.25.8
package com.google.privacy.dlp.v2;
/**
*
*
* <pre>
* How often existing buckets should have their profiles refreshed.
* New buckets are scanned as quickly as possible depending on system
* capacity.
* </pre>
*
* Protobuf type {@code google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence}
*/
public final class DiscoveryCloudStorageGenerationCadence
    extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence)
    DiscoveryCloudStorageGenerationCadenceOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use DiscoveryCloudStorageGenerationCadence.newBuilder() to construct.
  private DiscoveryCloudStorageGenerationCadence(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default constructor: initializes scalar fields to their proto3 defaults.
  private DiscoveryCloudStorageGenerationCadence() {
    refreshFrequency_ = 0;
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new DiscoveryCloudStorageGenerationCadence();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_DiscoveryCloudStorageGenerationCadence_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.privacy.dlp.v2.DlpProto
        .internal_static_google_privacy_dlp_v2_DiscoveryCloudStorageGenerationCadence_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence.class,
            com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence.Builder.class);
  }
  // Presence bits for optional message fields; bit 0x00000001 marks
  // inspectTemplateModifiedCadence as explicitly set.
  private int bitField0_;
  public static final int REFRESH_FREQUENCY_FIELD_NUMBER = 1;
  private int refreshFrequency_ = 0;
  /**
   *
   *
   * <pre>
   * Optional. Data changes in Cloud Storage can't trigger reprofiling. If you
   * set this field, profiles are refreshed at this frequency regardless of
   * whether the underlying buckets have changed. Defaults to never.
   * </pre>
   *
   * <code>
   * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The enum numeric value on the wire for refreshFrequency.
   */
  @java.lang.Override
  public int getRefreshFrequencyValue() {
    return refreshFrequency_;
  }
  /**
   *
   *
   * <pre>
   * Optional. Data changes in Cloud Storage can't trigger reprofiling. If you
   * set this field, profiles are refreshed at this frequency regardless of
   * whether the underlying buckets have changed. Defaults to never.
   * </pre>
   *
   * <code>
   * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The refreshFrequency.
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.DataProfileUpdateFrequency getRefreshFrequency() {
    com.google.privacy.dlp.v2.DataProfileUpdateFrequency result =
        com.google.privacy.dlp.v2.DataProfileUpdateFrequency.forNumber(refreshFrequency_);
    // Unknown wire values map to UNRECOGNIZED rather than null.
    return result == null
        ? com.google.privacy.dlp.v2.DataProfileUpdateFrequency.UNRECOGNIZED
        : result;
  }
  public static final int INSPECT_TEMPLATE_MODIFIED_CADENCE_FIELD_NUMBER = 2;
  private com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence
      inspectTemplateModifiedCadence_;
  /**
   *
   *
   * <pre>
   * Optional. Governs when to update data profiles when the inspection rules
   * defined by the `InspectTemplate` change.
   * If not set, changing the template will not cause a data profile to update.
   * </pre>
   *
   * <code>
   * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the inspectTemplateModifiedCadence field is set.
   */
  @java.lang.Override
  public boolean hasInspectTemplateModifiedCadence() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. Governs when to update data profiles when the inspection rules
   * defined by the `InspectTemplate` change.
   * If not set, changing the template will not cause a data profile to update.
   * </pre>
   *
   * <code>
   * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The inspectTemplateModifiedCadence.
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence
      getInspectTemplateModifiedCadence() {
    return inspectTemplateModifiedCadence_ == null
        ? com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.getDefaultInstance()
        : inspectTemplateModifiedCadence_;
  }
  /**
   *
   *
   * <pre>
   * Optional. Governs when to update data profiles when the inspection rules
   * defined by the `InspectTemplate` change.
   * If not set, changing the template will not cause a data profile to update.
   * </pre>
   *
   * <code>
   * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadenceOrBuilder
      getInspectTemplateModifiedCadenceOrBuilder() {
    return inspectTemplateModifiedCadence_ == null
        ? com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.getDefaultInstance()
        : inspectTemplateModifiedCadence_;
  }
  // Cached isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Writes only non-default fields, per proto3 serialization semantics.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (refreshFrequency_
        != com.google.privacy.dlp.v2.DataProfileUpdateFrequency.UPDATE_FREQUENCY_UNSPECIFIED
            .getNumber()) {
      output.writeEnum(1, refreshFrequency_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getInspectTemplateModifiedCadence());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes in the inherited memoizedSize field) the byte size
  // of the serialized message; must mirror writeTo()'s field selection.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (refreshFrequency_
        != com.google.privacy.dlp.v2.DataProfileUpdateFrequency.UPDATE_FREQUENCY_UNSPECIFIED
            .getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, refreshFrequency_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size +=
          com.google.protobuf.CodedOutputStream.computeMessageSize(
              2, getInspectTemplateModifiedCadence());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence)) {
      return super.equals(obj);
    }
    com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence other =
        (com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence) obj;
    if (refreshFrequency_ != other.refreshFrequency_) return false;
    if (hasInspectTemplateModifiedCadence() != other.hasInspectTemplateModifiedCadence())
      return false;
    if (hasInspectTemplateModifiedCadence()) {
      if (!getInspectTemplateModifiedCadence().equals(other.getInspectTemplateModifiedCadence()))
        return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash code is memoized in the inherited memoizedHashCode field (0 = unset).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + REFRESH_FREQUENCY_FIELD_NUMBER;
    hash = (53 * hash) + refreshFrequency_;
    if (hasInspectTemplateModifiedCadence()) {
      hash = (37 * hash) + INSPECT_TEMPLATE_MODIFIED_CADENCE_FIELD_NUMBER;
      hash = (53 * hash) + getInspectTemplateModifiedCadence().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * How often existing buckets should have their profiles refreshed.
   * New buckets are scanned as quickly as possible depending on system
   * capacity.
   * </pre>
   *
   * Protobuf type {@code google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence)
      com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadenceOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_DiscoveryCloudStorageGenerationCadence_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_DiscoveryCloudStorageGenerationCadence_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence.class,
              com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence.Builder.class);
    }
    // Construct using com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates nested-message field builders when the runtime requires it.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getInspectTemplateModifiedCadenceFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      refreshFrequency_ = 0;
      inspectTemplateModifiedCadence_ = null;
      if (inspectTemplateModifiedCadenceBuilder_ != null) {
        inspectTemplateModifiedCadenceBuilder_.dispose();
        inspectTemplateModifiedCadenceBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.privacy.dlp.v2.DlpProto
          .internal_static_google_privacy_dlp_v2_DiscoveryCloudStorageGenerationCadence_descriptor;
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence
        getDefaultInstanceForType() {
      return com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence build() {
      com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence buildPartial() {
      com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence result =
          new com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set fields from the builder into the message, translating
    // builder presence bits (0x1, 0x2) into the message's bit layout (0x1).
    private void buildPartial0(
        com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.refreshFrequency_ = refreshFrequency_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.inspectTemplateModifiedCadence_ =
            inspectTemplateModifiedCadenceBuilder_ == null
                ? inspectTemplateModifiedCadence_
                : inspectTemplateModifiedCadenceBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence) {
        return mergeFrom((com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(
        com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence other) {
      if (other
          == com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence.getDefaultInstance())
        return this;
      if (other.refreshFrequency_ != 0) {
        setRefreshFrequencyValue(other.getRefreshFrequencyValue());
      }
      if (other.hasInspectTemplateModifiedCadence()) {
        mergeInspectTemplateModifiedCadence(other.getInspectTemplateModifiedCadence());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parser loop: dispatches on field tag (field number << 3 | wire type).
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                refreshFrequency_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 18:
              {
                input.readMessage(
                    getInspectTemplateModifiedCadenceFieldBuilder().getBuilder(),
                    extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Builder-local presence bits: 0x1 = refreshFrequency set,
    // 0x2 = inspectTemplateModifiedCadence set.
    private int bitField0_;
    private int refreshFrequency_ = 0;
    /**
     *
     *
     * <pre>
     * Optional. Data changes in Cloud Storage can't trigger reprofiling. If you
     * set this field, profiles are refreshed at this frequency regardless of
     * whether the underlying buckets have changed. Defaults to never.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The enum numeric value on the wire for refreshFrequency.
     */
    @java.lang.Override
    public int getRefreshFrequencyValue() {
      return refreshFrequency_;
    }
    /**
     *
     *
     * <pre>
     * Optional. Data changes in Cloud Storage can't trigger reprofiling. If you
     * set this field, profiles are refreshed at this frequency regardless of
     * whether the underlying buckets have changed. Defaults to never.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The enum numeric value on the wire for refreshFrequency to set.
     * @return This builder for chaining.
     */
    public Builder setRefreshFrequencyValue(int value) {
      refreshFrequency_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Data changes in Cloud Storage can't trigger reprofiling. If you
     * set this field, profiles are refreshed at this frequency regardless of
     * whether the underlying buckets have changed. Defaults to never.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The refreshFrequency.
     */
    @java.lang.Override
    public com.google.privacy.dlp.v2.DataProfileUpdateFrequency getRefreshFrequency() {
      com.google.privacy.dlp.v2.DataProfileUpdateFrequency result =
          com.google.privacy.dlp.v2.DataProfileUpdateFrequency.forNumber(refreshFrequency_);
      return result == null
          ? com.google.privacy.dlp.v2.DataProfileUpdateFrequency.UNRECOGNIZED
          : result;
    }
    /**
     *
     *
     * <pre>
     * Optional. Data changes in Cloud Storage can't trigger reprofiling. If you
     * set this field, profiles are refreshed at this frequency regardless of
     * whether the underlying buckets have changed. Defaults to never.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @param value The refreshFrequency to set.
     * @return This builder for chaining.
     */
    public Builder setRefreshFrequency(com.google.privacy.dlp.v2.DataProfileUpdateFrequency value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      refreshFrequency_ = value.getNumber();
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Data changes in Cloud Storage can't trigger reprofiling. If you
     * set this field, profiles are refreshed at this frequency regardless of
     * whether the underlying buckets have changed. Defaults to never.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DataProfileUpdateFrequency refresh_frequency = 1 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearRefreshFrequency() {
      bitField0_ = (bitField0_ & ~0x00000001);
      refreshFrequency_ = 0;
      onChanged();
      return this;
    }
    private com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence
        inspectTemplateModifiedCadence_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence,
            com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.Builder,
            com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadenceOrBuilder>
        inspectTemplateModifiedCadenceBuilder_;
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return Whether the inspectTemplateModifiedCadence field is set.
     */
    public boolean hasInspectTemplateModifiedCadence() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     *
     * @return The inspectTemplateModifiedCadence.
     */
    public com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence
        getInspectTemplateModifiedCadence() {
      if (inspectTemplateModifiedCadenceBuilder_ == null) {
        return inspectTemplateModifiedCadence_ == null
            ? com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.getDefaultInstance()
            : inspectTemplateModifiedCadence_;
      } else {
        return inspectTemplateModifiedCadenceBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setInspectTemplateModifiedCadence(
        com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence value) {
      if (inspectTemplateModifiedCadenceBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        inspectTemplateModifiedCadence_ = value;
      } else {
        inspectTemplateModifiedCadenceBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder setInspectTemplateModifiedCadence(
        com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.Builder builderForValue) {
      if (inspectTemplateModifiedCadenceBuilder_ == null) {
        inspectTemplateModifiedCadence_ = builderForValue.build();
      } else {
        inspectTemplateModifiedCadenceBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder mergeInspectTemplateModifiedCadence(
        com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence value) {
      if (inspectTemplateModifiedCadenceBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && inspectTemplateModifiedCadence_ != null
            && inspectTemplateModifiedCadence_
                != com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence
                    .getDefaultInstance()) {
          getInspectTemplateModifiedCadenceBuilder().mergeFrom(value);
        } else {
          inspectTemplateModifiedCadence_ = value;
        }
      } else {
        inspectTemplateModifiedCadenceBuilder_.mergeFrom(value);
      }
      if (inspectTemplateModifiedCadence_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public Builder clearInspectTemplateModifiedCadence() {
      bitField0_ = (bitField0_ & ~0x00000002);
      inspectTemplateModifiedCadence_ = null;
      if (inspectTemplateModifiedCadenceBuilder_ != null) {
        inspectTemplateModifiedCadenceBuilder_.dispose();
        inspectTemplateModifiedCadenceBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.Builder
        getInspectTemplateModifiedCadenceBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getInspectTemplateModifiedCadenceFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    public com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadenceOrBuilder
        getInspectTemplateModifiedCadenceOrBuilder() {
      if (inspectTemplateModifiedCadenceBuilder_ != null) {
        return inspectTemplateModifiedCadenceBuilder_.getMessageOrBuilder();
      } else {
        return inspectTemplateModifiedCadence_ == null
            ? com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.getDefaultInstance()
            : inspectTemplateModifiedCadence_;
      }
    }
    /**
     *
     *
     * <pre>
     * Optional. Governs when to update data profiles when the inspection rules
     * defined by the `InspectTemplate` change.
     * If not set, changing the template will not cause a data profile to update.
     * </pre>
     *
     * <code>
     * .google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence inspect_template_modified_cadence = 2 [(.google.api.field_behavior) = OPTIONAL];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence,
            com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.Builder,
            com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadenceOrBuilder>
        getInspectTemplateModifiedCadenceFieldBuilder() {
      if (inspectTemplateModifiedCadenceBuilder_ == null) {
        inspectTemplateModifiedCadenceBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence,
                com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadence.Builder,
                com.google.privacy.dlp.v2.DiscoveryInspectTemplateModifiedCadenceOrBuilder>(
                getInspectTemplateModifiedCadence(), getParentForChildren(), isClean());
        inspectTemplateModifiedCadence_ = null;
      }
      return inspectTemplateModifiedCadenceBuilder_;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence)
  }
  // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence)
  // Singleton default instance returned by getDefaultInstance()/getDefaultInstanceForType().
  private static final com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence
      DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence();
  }
  public static com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Shared parser delegating to the Builder's mergeFrom wire-format loop.
  private static final com.google.protobuf.Parser<DiscoveryCloudStorageGenerationCadence> PARSER =
      new com.google.protobuf.AbstractParser<DiscoveryCloudStorageGenerationCadence>() {
        @java.lang.Override
        public DiscoveryCloudStorageGenerationCadence parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<DiscoveryCloudStorageGenerationCadence> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<DiscoveryCloudStorageGenerationCadence> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.privacy.dlp.v2.DiscoveryCloudStorageGenerationCadence
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,898 | java-securesourcemanager/proto-google-cloud-securesourcemanager-v1/src/main/java/com/google/cloud/securesourcemanager/v1/FetchTreeResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/securesourcemanager/v1/secure_source_manager.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.securesourcemanager.v1;
/**
*
*
* <pre>
* Response message containing a list of TreeEntry objects.
* </pre>
*
* Protobuf type {@code google.cloud.securesourcemanager.v1.FetchTreeResponse}
*/
public final class FetchTreeResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.securesourcemanager.v1.FetchTreeResponse)
FetchTreeResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use FetchTreeResponse.newBuilder() to construct.
private FetchTreeResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private FetchTreeResponse() {
treeEntries_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new FetchTreeResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_FetchTreeResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_FetchTreeResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securesourcemanager.v1.FetchTreeResponse.class,
com.google.cloud.securesourcemanager.v1.FetchTreeResponse.Builder.class);
}
public static final int TREE_ENTRIES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.securesourcemanager.v1.TreeEntry> treeEntries_;
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.securesourcemanager.v1.TreeEntry> getTreeEntriesList() {
return treeEntries_;
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.securesourcemanager.v1.TreeEntryOrBuilder>
getTreeEntriesOrBuilderList() {
return treeEntries_;
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
@java.lang.Override
public int getTreeEntriesCount() {
return treeEntries_.size();
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.TreeEntry getTreeEntries(int index) {
return treeEntries_.get(index);
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.TreeEntryOrBuilder getTreeEntriesOrBuilder(
int index) {
return treeEntries_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < treeEntries_.size(); i++) {
output.writeMessage(1, treeEntries_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < treeEntries_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, treeEntries_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.securesourcemanager.v1.FetchTreeResponse)) {
return super.equals(obj);
}
com.google.cloud.securesourcemanager.v1.FetchTreeResponse other =
(com.google.cloud.securesourcemanager.v1.FetchTreeResponse) obj;
if (!getTreeEntriesList().equals(other.getTreeEntriesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getTreeEntriesCount() > 0) {
hash = (37 * hash) + TREE_ENTRIES_FIELD_NUMBER;
hash = (53 * hash) + getTreeEntriesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.securesourcemanager.v1.FetchTreeResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message containing a list of TreeEntry objects.
* </pre>
*
* Protobuf type {@code google.cloud.securesourcemanager.v1.FetchTreeResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.securesourcemanager.v1.FetchTreeResponse)
com.google.cloud.securesourcemanager.v1.FetchTreeResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_FetchTreeResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_FetchTreeResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.securesourcemanager.v1.FetchTreeResponse.class,
com.google.cloud.securesourcemanager.v1.FetchTreeResponse.Builder.class);
}
// Construct using com.google.cloud.securesourcemanager.v1.FetchTreeResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (treeEntriesBuilder_ == null) {
treeEntries_ = java.util.Collections.emptyList();
} else {
treeEntries_ = null;
treeEntriesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.securesourcemanager.v1.SecureSourceManagerProto
.internal_static_google_cloud_securesourcemanager_v1_FetchTreeResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.FetchTreeResponse getDefaultInstanceForType() {
return com.google.cloud.securesourcemanager.v1.FetchTreeResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.FetchTreeResponse build() {
com.google.cloud.securesourcemanager.v1.FetchTreeResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.FetchTreeResponse buildPartial() {
com.google.cloud.securesourcemanager.v1.FetchTreeResponse result =
new com.google.cloud.securesourcemanager.v1.FetchTreeResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.securesourcemanager.v1.FetchTreeResponse result) {
if (treeEntriesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
treeEntries_ = java.util.Collections.unmodifiableList(treeEntries_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.treeEntries_ = treeEntries_;
} else {
result.treeEntries_ = treeEntriesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.securesourcemanager.v1.FetchTreeResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.securesourcemanager.v1.FetchTreeResponse) {
return mergeFrom((com.google.cloud.securesourcemanager.v1.FetchTreeResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.securesourcemanager.v1.FetchTreeResponse other) {
if (other == com.google.cloud.securesourcemanager.v1.FetchTreeResponse.getDefaultInstance())
return this;
if (treeEntriesBuilder_ == null) {
if (!other.treeEntries_.isEmpty()) {
if (treeEntries_.isEmpty()) {
treeEntries_ = other.treeEntries_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureTreeEntriesIsMutable();
treeEntries_.addAll(other.treeEntries_);
}
onChanged();
}
} else {
if (!other.treeEntries_.isEmpty()) {
if (treeEntriesBuilder_.isEmpty()) {
treeEntriesBuilder_.dispose();
treeEntriesBuilder_ = null;
treeEntries_ = other.treeEntries_;
bitField0_ = (bitField0_ & ~0x00000001);
treeEntriesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getTreeEntriesFieldBuilder()
: null;
} else {
treeEntriesBuilder_.addAllMessages(other.treeEntries_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.securesourcemanager.v1.TreeEntry m =
input.readMessage(
com.google.cloud.securesourcemanager.v1.TreeEntry.parser(),
extensionRegistry);
if (treeEntriesBuilder_ == null) {
ensureTreeEntriesIsMutable();
treeEntries_.add(m);
} else {
treeEntriesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.securesourcemanager.v1.TreeEntry> treeEntries_ =
java.util.Collections.emptyList();
private void ensureTreeEntriesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
treeEntries_ =
new java.util.ArrayList<com.google.cloud.securesourcemanager.v1.TreeEntry>(
treeEntries_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securesourcemanager.v1.TreeEntry,
com.google.cloud.securesourcemanager.v1.TreeEntry.Builder,
com.google.cloud.securesourcemanager.v1.TreeEntryOrBuilder>
treeEntriesBuilder_;
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public java.util.List<com.google.cloud.securesourcemanager.v1.TreeEntry> getTreeEntriesList() {
if (treeEntriesBuilder_ == null) {
return java.util.Collections.unmodifiableList(treeEntries_);
} else {
return treeEntriesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public int getTreeEntriesCount() {
if (treeEntriesBuilder_ == null) {
return treeEntries_.size();
} else {
return treeEntriesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.TreeEntry getTreeEntries(int index) {
if (treeEntriesBuilder_ == null) {
return treeEntries_.get(index);
} else {
return treeEntriesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public Builder setTreeEntries(
int index, com.google.cloud.securesourcemanager.v1.TreeEntry value) {
if (treeEntriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTreeEntriesIsMutable();
treeEntries_.set(index, value);
onChanged();
} else {
treeEntriesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public Builder setTreeEntries(
int index, com.google.cloud.securesourcemanager.v1.TreeEntry.Builder builderForValue) {
if (treeEntriesBuilder_ == null) {
ensureTreeEntriesIsMutable();
treeEntries_.set(index, builderForValue.build());
onChanged();
} else {
treeEntriesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public Builder addTreeEntries(com.google.cloud.securesourcemanager.v1.TreeEntry value) {
if (treeEntriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTreeEntriesIsMutable();
treeEntries_.add(value);
onChanged();
} else {
treeEntriesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public Builder addTreeEntries(
int index, com.google.cloud.securesourcemanager.v1.TreeEntry value) {
if (treeEntriesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureTreeEntriesIsMutable();
treeEntries_.add(index, value);
onChanged();
} else {
treeEntriesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public Builder addTreeEntries(
com.google.cloud.securesourcemanager.v1.TreeEntry.Builder builderForValue) {
if (treeEntriesBuilder_ == null) {
ensureTreeEntriesIsMutable();
treeEntries_.add(builderForValue.build());
onChanged();
} else {
treeEntriesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public Builder addTreeEntries(
int index, com.google.cloud.securesourcemanager.v1.TreeEntry.Builder builderForValue) {
if (treeEntriesBuilder_ == null) {
ensureTreeEntriesIsMutable();
treeEntries_.add(index, builderForValue.build());
onChanged();
} else {
treeEntriesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public Builder addAllTreeEntries(
java.lang.Iterable<? extends com.google.cloud.securesourcemanager.v1.TreeEntry> values) {
if (treeEntriesBuilder_ == null) {
ensureTreeEntriesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, treeEntries_);
onChanged();
} else {
treeEntriesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public Builder clearTreeEntries() {
if (treeEntriesBuilder_ == null) {
treeEntries_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
treeEntriesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public Builder removeTreeEntries(int index) {
if (treeEntriesBuilder_ == null) {
ensureTreeEntriesIsMutable();
treeEntries_.remove(index);
onChanged();
} else {
treeEntriesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.TreeEntry.Builder getTreeEntriesBuilder(
int index) {
return getTreeEntriesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.TreeEntryOrBuilder getTreeEntriesOrBuilder(
int index) {
if (treeEntriesBuilder_ == null) {
return treeEntries_.get(index);
} else {
return treeEntriesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public java.util.List<? extends com.google.cloud.securesourcemanager.v1.TreeEntryOrBuilder>
getTreeEntriesOrBuilderList() {
if (treeEntriesBuilder_ != null) {
return treeEntriesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(treeEntries_);
}
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.TreeEntry.Builder addTreeEntriesBuilder() {
return getTreeEntriesFieldBuilder()
.addBuilder(com.google.cloud.securesourcemanager.v1.TreeEntry.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public com.google.cloud.securesourcemanager.v1.TreeEntry.Builder addTreeEntriesBuilder(
int index) {
return getTreeEntriesFieldBuilder()
.addBuilder(
index, com.google.cloud.securesourcemanager.v1.TreeEntry.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of TreeEntry objects.
* </pre>
*
* <code>repeated .google.cloud.securesourcemanager.v1.TreeEntry tree_entries = 1;</code>
*/
public java.util.List<com.google.cloud.securesourcemanager.v1.TreeEntry.Builder>
getTreeEntriesBuilderList() {
return getTreeEntriesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securesourcemanager.v1.TreeEntry,
com.google.cloud.securesourcemanager.v1.TreeEntry.Builder,
com.google.cloud.securesourcemanager.v1.TreeEntryOrBuilder>
getTreeEntriesFieldBuilder() {
if (treeEntriesBuilder_ == null) {
treeEntriesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.securesourcemanager.v1.TreeEntry,
com.google.cloud.securesourcemanager.v1.TreeEntry.Builder,
com.google.cloud.securesourcemanager.v1.TreeEntryOrBuilder>(
treeEntries_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
treeEntries_ = null;
}
return treeEntriesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token identifying a page of results the server should return.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.securesourcemanager.v1.FetchTreeResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.securesourcemanager.v1.FetchTreeResponse)
private static final com.google.cloud.securesourcemanager.v1.FetchTreeResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.securesourcemanager.v1.FetchTreeResponse();
}
public static com.google.cloud.securesourcemanager.v1.FetchTreeResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<FetchTreeResponse> PARSER =
new com.google.protobuf.AbstractParser<FetchTreeResponse>() {
@java.lang.Override
public FetchTreeResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<FetchTreeResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<FetchTreeResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.securesourcemanager.v1.FetchTreeResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/seatunnel | 36,049 | seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/java/org/apache/seatunnel/engine/e2e/telemetry/MasterWorkerClusterSeaTunnelWithTelemetryIT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.seatunnel.engine.e2e.telemetry;
import org.apache.seatunnel.e2e.common.container.seatunnel.SeaTunnelContainer;
import org.apache.seatunnel.e2e.common.util.ContainerUtil;
import org.apache.seatunnel.engine.server.rest.RestConstant;
import org.awaitility.Awaitility;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Assertions;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.testcontainers.containers.GenericContainer;
import org.testcontainers.containers.Network;
import org.testcontainers.containers.output.Slf4jLogConsumer;
import org.testcontainers.containers.wait.strategy.Wait;
import org.testcontainers.utility.DockerLoggerFactory;
import org.testcontainers.utility.MountableFile;
import io.restassured.response.Response;
import io.restassured.response.ValidatableResponse;
import java.io.IOException;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.Arrays;
import java.util.concurrent.TimeUnit;
import static io.restassured.RestAssured.given;
import static org.apache.seatunnel.e2e.common.util.ContainerUtil.PROJECT_ROOT_PATH;
import static org.apache.seatunnel.engine.server.rest.RestConstant.CONTEXT_PATH;
import static org.hamcrest.CoreMatchers.containsString;
import static org.hamcrest.Matchers.equalTo;
import static org.hamcrest.Matchers.matchesRegex;
public class MasterWorkerClusterSeaTunnelWithTelemetryIT extends SeaTunnelContainer {

    /** Metric families exported once per Hazelcast internal executor type. */
    private static final String[] HAZELCAST_EXECUTOR_METRICS = {
        "hazelcast_executor_executedCount",
        "hazelcast_executor_isShutdown",
        "hazelcast_executor_isTerminated",
        "hazelcast_executor_maxPoolSize",
        "hazelcast_executor_poolSize",
        "hazelcast_executor_queueRemainingCapacity",
        "hazelcast_executor_queueSize"
    };

    /** The {@code type} label values present on every hazelcast_executor_* metric family. */
    private static final String[] HAZELCAST_EXECUTOR_TYPES = {
        "async", "client", "clientBlocking", "clientQuery",
        "io", "offloadable", "scheduled", "system"
    };

    /** Job thread pool gauges that are exported only by the master node. */
    private static final String[] JOB_THREAD_POOL_METRICS = {
        "job_thread_pool_activeCount",
        "job_thread_pool_completedTask_total",
        "job_thread_pool_corePoolSize",
        "job_thread_pool_maximumPoolSize",
        "job_thread_pool_poolSize",
        "job_thread_pool_task_total",
        "job_thread_pool_queueTaskCount",
        "job_thread_pool_rejection_total"
    };

    /**
     * Expected {@code job_count} series: exactly one STREAMING job is submitted in
     * {@link #startUp()}, so only the "running" counter is 1.0.
     */
    private static final String[][] EXPECTED_JOB_COUNTS = {
        {"canceled", "0.0"},
        {"cancelling", "0.0"},
        {"created", "0.0"},
        {"failed", "0.0"},
        {"failing", "0.0"},
        {"finished", "0.0"},
        {"running", "1.0"},
        {"scheduled", "0.0"}
    };

    /** Worker node of the two-node cluster; {@code server} from the base class is the master. */
    private GenericContainer<?> secondServer;

    // Dedicated docker network so the master and worker containers can reach each other.
    // Renamed from NETWORK: it is a per-instance resource, not a constant; closed in tearDown().
    private final Network network = Network.newNetwork();

    private static final String jobName = "test测试";
    private static final String paramJobName = "param_test测试";
    private static final String http = "http://";
    private static final String colon = ":";
    private static final String confFile = "/fakesource_to_console.conf";
    private static final Path binPath = Paths.get(SEATUNNEL_HOME, "bin", SERVER_SHELL);
    private static final Path config = Paths.get(SEATUNNEL_HOME, "config");
    private static final Path hadoopJar =
            Paths.get(SEATUNNEL_HOME, "lib/seatunnel-hadoop3-3.1.4-uber.jar");

    /** Verifies both nodes expose telemetry; only the master must expose job metrics. */
    @Test
    public void testSubmitJobs() throws InterruptedException {
        testGetMetrics(server, "seatunnel", true);
        testGetMetrics(secondServer, "seatunnel", false);
    }

    /**
     * Starts a master and a worker container, waits for the two-member cluster to form,
     * then submits one streaming job and waits until it reports RUNNING.
     */
    @Override
    @BeforeEach
    public void startUp() throws Exception {
        server = createServer("server", "master");
        secondServer = createServer("secondServer", "worker");
        // Wait until both members have joined the Hazelcast cluster.
        Awaitility.await()
                .atMost(2, TimeUnit.MINUTES)
                .untilAsserted(
                        () -> {
                            Response response =
                                    given().get(
                                                    http
                                                            + server.getHost()
                                                            + colon
                                                            + server.getFirstMappedPort()
                                                            + "/hazelcast/rest/cluster");
                            response.then().statusCode(200);
                            Assertions.assertEquals(
                                    2, response.jsonPath().getList("members").size());
                        });
        String jobId =
                submitJob(
                                server,
                                server.getMappedPort(5801),
                                RestConstant.CONTEXT_PATH,
                                "STREAMING",
                                jobName,
                                paramJobName)
                        .getBody()
                        .jsonPath()
                        .getString("jobId");
        // The job_count assertions in testGetMetrics assume exactly one RUNNING job,
        // so block here until the submitted job reaches that state.
        Awaitility.await()
                .atMost(2, TimeUnit.MINUTES)
                .untilAsserted(
                        () -> {
                            Assertions.assertNotNull(jobId);
                            given().get(
                                            http
                                                    + server.getHost()
                                                    + colon
                                                    + server.getFirstMappedPort()
                                                    + CONTEXT_PATH
                                                    + RestConstant.REST_URL_JOB_INFO
                                                    + "/"
                                                    + jobId)
                                    .then()
                                    .statusCode(200)
                                    .body("jobStatus", equalTo("RUNNING"));
                        });
    }

    /** Builds the multi-line regex for a metric labelled with cluster and address only. */
    private static String addressMetricRegex(String metricName, String clusterName) {
        return "(?s)^.*" + metricName + "\\{cluster=\"" + clusterName + "\",address=.*$";
    }

    /** Builds the multi-line regex for a metric additionally labelled with an executor type. */
    private static String typedMetricRegex(String metricName, String clusterName, String type) {
        return "(?s)^.*"
                + metricName
                + "\\{cluster=\""
                + clusterName
                + "\",address=.*,type=\""
                + type
                + "\".*$";
    }

    /**
     * Scrapes the node's Prometheus endpoint and asserts all expected metric families exist.
     *
     * @param server container whose {@code /hazelcast/rest/instance/metrics} endpoint is scraped
     * @param testClusterName expected value of the {@code cluster} label on every metric
     * @param isMaster whether master-only job metrics should also be asserted
     */
    public void testGetMetrics(GenericContainer<?> server, String testClusterName, boolean isMaster)
            throws InterruptedException {
        Response response =
                given().get(
                                http
                                        + server.getHost()
                                        + colon
                                        + server.getFirstMappedPort()
                                        + "/hazelcast/rest/instance/metrics");
        ValidatableResponse validatableResponse =
                response.then()
                        .statusCode(200)
                        // Metric data is usually multi-line; (?s) lets .* span newlines. This
                        // verifies the body is Prometheus exposition format (# HELP / # TYPE).
                        .body(matchesRegex("(?s)^.*# HELP.*# TYPE.*$"))
                        // JVM metrics
                        .body(containsString("jvm_threads"))
                        .body(containsString("jvm_memory_pool"))
                        .body(containsString("jvm_gc"))
                        .body(containsString("jvm_info"))
                        .body(containsString("jvm_memory_bytes"))
                        .body(containsString("jvm_classes"))
                        .body(containsString("jvm_buffer_pool"))
                        .body(containsString("process_start"))
                        // cluster_info
                        .body(containsString("cluster_info{cluster=\"" + testClusterName))
                        // cluster_time
                        .body(containsString("cluster_time{cluster=\"" + testClusterName));
        if (isMaster) {
            // Job thread pool metrics are exported by the master only.
            for (String metric : JOB_THREAD_POOL_METRICS) {
                validatableResponse.body(matchesRegex(addressMetricRegex(metric, testClusterName)));
            }
            // Job count metrics: one series per job state, values per EXPECTED_JOB_COUNTS.
            for (String[] stateAndCount : EXPECTED_JOB_COUNTS) {
                validatableResponse.body(
                        containsString(
                                "job_count{cluster=\""
                                        + testClusterName
                                        + "\",type=\""
                                        + stateAndCount[0]
                                        + "\",} "
                                        + stateAndCount[1]));
            }
        }
        // Node state
        validatableResponse.body(matchesRegex(addressMetricRegex("node_state", testClusterName)));
        // Hazelcast executor gauges: every family must exist for every executor type.
        for (String metric : HAZELCAST_EXECUTOR_METRICS) {
            for (String type : HAZELCAST_EXECUTOR_TYPES) {
                validatableResponse.body(
                        matchesRegex(typedMetricRegex(metric, testClusterName, type)));
            }
        }
        // Partition health gauges.
        validatableResponse
                .body(
                        matchesRegex(
                                addressMetricRegex(
                                        "hazelcast_partition_partitionCount", testClusterName)))
                .body(
                        matchesRegex(
                                addressMetricRegex(
                                        "hazelcast_partition_activePartition", testClusterName)))
                .body(
                        matchesRegex(
                                addressMetricRegex(
                                        "hazelcast_partition_isClusterSafe", testClusterName)))
                .body(
                        matchesRegex(
                                addressMetricRegex(
                                        "hazelcast_partition_isLocalMemberSafe",
                                        testClusterName)));
    }

    /** Stops the worker container and releases the docker network created by this test. */
    @Override
    @AfterEach
    public void tearDown() throws Exception {
        super.tearDown();
        if (secondServer != null) {
            secondServer.close();
        }
        // Fix: the network created in the field initializer was previously never closed,
        // leaking a docker network per test run.
        network.close();
    }

    /** Submits a job without the start-with-savepoint flag. */
    private Response submitJob(
            GenericContainer<?> container,
            int port,
            String contextPath,
            String jobMode,
            String jobName,
            String paramJobName) {
        return submitJob(jobMode, container, port, contextPath, false, jobName, paramJobName);
    }

    /**
     * Submits a FakeSource -> Console job to the REST submit-job endpoint.
     *
     * @param jobMode "STREAMING" or "BATCH", written into the job env
     * @param container node whose REST endpoint receives the request
     * @param port mapped host port of the node's REST server
     * @param contextPath REST context path prefix
     * @param isStartWithSavePoint whether to append the isStartWithSavePoint query parameter
     * @param jobName job name embedded in the request body
     * @param paramJobName job name passed as a query parameter; may be null to omit it
     * @return the raw REST response (callers extract {@code jobId} from it)
     */
    private Response submitJob(
            String jobMode,
            GenericContainer<?> container,
            int port,
            String contextPath,
            boolean isStartWithSavePoint,
            String jobName,
            String paramJobName) {
        String requestBody =
                "{\n"
                        + "    \"env\": {\n"
                        + "        \"job.name\": \""
                        + jobName
                        + "\",\n"
                        + "        \"job.mode\": \""
                        + jobMode
                        + "\"\n"
                        + "    },\n"
                        + "    \"source\": [\n"
                        + "        {\n"
                        + "            \"plugin_name\": \"FakeSource\",\n"
                        + "            \"plugin_output\": \"fake\",\n"
                        + "            \"row.num\": 100,\n"
                        + "            \"schema\": {\n"
                        + "                \"fields\": {\n"
                        + "                    \"name\": \"string\",\n"
                        + "                    \"age\": \"int\",\n"
                        + "                    \"card\": \"int\"\n"
                        + "                }\n"
                        + "            }\n"
                        + "        }\n"
                        + "    ],\n"
                        + "    \"transform\": [\n"
                        + "    ],\n"
                        + "    \"sink\": [\n"
                        + "        {\n"
                        + "            \"plugin_name\": \"Console\",\n"
                        + "            \"plugin_input\": [\"fake\"]\n"
                        + "        }\n"
                        + "    ]\n"
                        + "}";
        String parameters = null;
        if (paramJobName != null) {
            parameters = "jobName=" + paramJobName;
        }
        // NOTE(review): assumes paramJobName != null whenever isStartWithSavePoint is set;
        // otherwise the literal string "null&isStartWithSavePoint=true" would be sent.
        if (isStartWithSavePoint) {
            parameters = parameters + "&isStartWithSavePoint=true";
        }
        Response response =
                given().body(requestBody)
                        .header("Content-Type", "application/json; charset=utf-8")
                        .post(
                                parameters == null
                                        ? http
                                                + container.getHost()
                                                + colon
                                                + port
                                                + contextPath
                                                + RestConstant.REST_URL_SUBMIT_JOB
                                        : http
                                                + container.getHost()
                                                + colon
                                                + port
                                                + contextPath
                                                + RestConstant.REST_URL_SUBMIT_JOB
                                                + "?"
                                                + parameters);
        return response;
    }

    /**
     * Creates and starts one SeaTunnel engine container with the given docker network alias
     * and engine role ("master" or "worker"), copying base config, the master-worker cluster
     * config (which overrides the base files) and the hadoop uber jar into the container.
     */
    private GenericContainer<?> createServer(String networkAlias, String role)
            throws IOException, InterruptedException {
        GenericContainer<?> server =
                new GenericContainer<>(getDockerImage())
                        .withNetwork(network)
                        .withEnv("TZ", "UTC")
                        .withCommand(
                                ContainerUtil.adaptPathForWin(binPath.toString()) + " -r " + role)
                        .withNetworkAliases(networkAlias)
                        .withExposedPorts()
                        .withLogConsumer(
                                new Slf4jLogConsumer(
                                        DockerLoggerFactory.getLogger(
                                                "seatunnel-engine:" + JDK_DOCKER_IMAGE)))
                        .waitingFor(Wait.forListeningPort());
        copySeaTunnelStarterToContainer(server);
        server.setExposedPorts(Arrays.asList(5801));
        server.withCopyFileToContainer(
                MountableFile.forHostPath(
                        PROJECT_ROOT_PATH
                                + "/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/resources/"),
                config.toString());
        // Copied second so the master-worker specific files override the base resources.
        server.withCopyFileToContainer(
                MountableFile.forHostPath(
                        PROJECT_ROOT_PATH
                                + "/seatunnel-e2e/seatunnel-engine-e2e/connector-seatunnel-e2e-base/src/test/resources/master-worker-cluster/"),
                config.toString());
        server.withCopyFileToContainer(
                MountableFile.forHostPath(
                        PROJECT_ROOT_PATH
                                + "/seatunnel-shade/seatunnel-hadoop3-3.1.4-uber/target/seatunnel-hadoop3-3.1.4-uber.jar"),
                hadoopJar.toString());
        server.start();
        // execute extra commands
        executeExtraCommands(server);
        ContainerUtil.copyConnectorJarToContainer(
                server,
                confFile,
                getConnectorModulePath(),
                getConnectorNamePrefix(),
                getConnectorType(),
                SEATUNNEL_HOME);
        return server;
    }
}
|
googleapis/google-cloud-java | 36,922 | java-shopping-merchant-accounts/proto-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/UpdateEmailPreferencesRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/accounts/v1/emailpreferences.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.accounts.v1;
/**
*
*
* <pre>
* Request message for UpdateEmailPreferences method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest}
*/
public final class UpdateEmailPreferencesRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest)
UpdateEmailPreferencesRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use UpdateEmailPreferencesRequest.newBuilder() to construct.
  private UpdateEmailPreferencesRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only to create the shared default instance.
  private UpdateEmailPreferencesRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Invoked reflectively by the protobuf runtime to create fresh instances.
    return new UpdateEmailPreferencesRequest();
  }

  /** Returns the generated descriptor metadata for this message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.shopping.merchant.accounts.v1.EmailPreferencesProto
        .internal_static_google_shopping_merchant_accounts_v1_UpdateEmailPreferencesRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Maps descriptor fields to the generated Java accessors for reflective access.
    return com.google.shopping.merchant.accounts.v1.EmailPreferencesProto
        .internal_static_google_shopping_merchant_accounts_v1_UpdateEmailPreferencesRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest.class,
            com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest.Builder.class);
  }
  // Presence bits for optional message fields: bit 0 = email_preferences, bit 1 = update_mask
  // (see hasEmailPreferences()/hasUpdateMask()).
  private int bitField0_;

  public static final int EMAIL_PREFERENCES_FIELD_NUMBER = 1;
  private com.google.shopping.merchant.accounts.v1.EmailPreferences emailPreferences_;

  /**
   *
   *
   * <pre>
   * Required. Email Preferences to be updated.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the emailPreferences field is set.
   */
  @java.lang.Override
  public boolean hasEmailPreferences() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. Email Preferences to be updated.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The emailPreferences. Never null: falls back to the default instance when unset.
   */
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1.EmailPreferences getEmailPreferences() {
    return emailPreferences_ == null
        ? com.google.shopping.merchant.accounts.v1.EmailPreferences.getDefaultInstance()
        : emailPreferences_;
  }

  /**
   *
   *
   * <pre>
   * Required. Email Preferences to be updated.
   * </pre>
   *
   * <code>
   * .google.shopping.merchant.accounts.v1.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.shopping.merchant.accounts.v1.EmailPreferencesOrBuilder
      getEmailPreferencesOrBuilder() {
    return emailPreferences_ == null
        ? com.google.shopping.merchant.accounts.v1.EmailPreferences.getDefaultInstance()
        : emailPreferences_;
  }
  public static final int UPDATE_MASK_FIELD_NUMBER = 2;
  private com.google.protobuf.FieldMask updateMask_;

  /**
   *
   *
   * <pre>
   * Required. List of fields being updated.
   *
   * The following fields are supported (in both `snake_case` and
   * `lowerCamelCase`):
   *
   * - `news_and_tips`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. List of fields being updated.
   *
   * The following fields are supported (in both `snake_case` and
   * `lowerCamelCase`):
   *
   * - `news_and_tips`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The updateMask. Never null: falls back to the default instance when unset.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  /**
   *
   *
   * <pre>
   * Required. List of fields being updated.
   *
   * The following fields are supported (in both `snake_case` and
   * `lowerCamelCase`):
   *
   * - `news_and_tips`
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  // Memoized initialization check: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // This message has no proto2 required fields, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serializes only fields whose presence bit is set, then any unknown fields.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getEmailPreferences());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getUpdateMask());
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Size is memoized in memoizedSize (-1 means not yet computed).
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getEmailPreferences());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    // Messages are equal when field presence, field values and unknown fields all match.
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest)) {
      return super.equals(obj);
    }
    com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest other =
        (com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest) obj;

    if (hasEmailPreferences() != other.hasEmailPreferences()) return false;
    if (hasEmailPreferences()) {
      if (!getEmailPreferences().equals(other.getEmailPreferences())) return false;
    }
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    // Hash is memoized (0 means not yet computed) and folds in only the fields that are set,
    // keeping it consistent with equals().
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasEmailPreferences()) {
      hash = (37 * hash) + EMAIL_PREFERENCES_FIELD_NUMBER;
      hash = (53 * hash) + getEmailPreferences().hashCode();
    }
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points. Each overload delegates to PARSER (for in-memory
  // data) or the GeneratedMessageV3 IO helpers (for streams), with and without an
  // ExtensionRegistry.
  public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a leading varint length prefix before the message bytes.
  public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Creates a new builder with all fields unset. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Creates a new builder pre-populated with the fields of {@code prototype}. */
  public static Builder newBuilder(
      com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Skips the mergeFrom copy when called on the shared default instance.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Request message for UpdateEmailPreferences method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest)
com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequestOrBuilder {
    /** Returns the generated descriptor metadata for this message type. */
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.shopping.merchant.accounts.v1.EmailPreferencesProto
          .internal_static_google_shopping_merchant_accounts_v1_UpdateEmailPreferencesRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      // Maps descriptor fields to the generated Java accessors for reflective access.
      return com.google.shopping.merchant.accounts.v1.EmailPreferencesProto
          .internal_static_google_shopping_merchant_accounts_v1_UpdateEmailPreferencesRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest.class,
              com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest.Builder.class);
    }

    // Construct using
    // com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly creates the nested field builders when the runtime is configured to always
      // use field builders.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getEmailPreferencesFieldBuilder();
        getUpdateMaskFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      // Resets all presence bits and field values and disposes any nested field builders.
      super.clear();
      bitField0_ = 0;
      emailPreferences_ = null;
      if (emailPreferencesBuilder_ != null) {
        emailPreferencesBuilder_.dispose();
        emailPreferencesBuilder_ = null;
      }
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.shopping.merchant.accounts.v1.EmailPreferencesProto
          .internal_static_google_shopping_merchant_accounts_v1_UpdateEmailPreferencesRequest_descriptor;
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest
        getDefaultInstanceForType() {
      return com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest build() {
      // Like buildPartial(), but fails when the result is not fully initialized.
      com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest buildPartial() {
      com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest result =
          new com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    private void buildPartial0(
        com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest result) {
      // Copies each set field — or its nested builder's current value — into the result,
      // and mirrors the presence bits onto the built message.
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.emailPreferences_ =
            emailPreferencesBuilder_ == null ? emailPreferences_ : emailPreferencesBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // The reflective mutators below simply delegate to the GeneratedMessageV3.Builder
    // implementations; they are overridden only to narrow the return type to Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Dispatches to the typed overload when possible, otherwise falls back to the
      // reflective generic merge.
      if (other instanceof com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest) {
        return mergeFrom(
            (com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest other) {
      // Merging the default instance is a no-op; otherwise only the fields set on
      // `other` (plus its unknown fields) are merged in.
      if (other
          == com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest
              .getDefaultInstance()) return this;
      if (other.hasEmailPreferences()) {
        mergeEmailPreferences(other.getEmailPreferences());
      }
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(
getEmailPreferencesFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.shopping.merchant.accounts.v1.EmailPreferences emailPreferences_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.shopping.merchant.accounts.v1.EmailPreferences,
com.google.shopping.merchant.accounts.v1.EmailPreferences.Builder,
com.google.shopping.merchant.accounts.v1.EmailPreferencesOrBuilder>
emailPreferencesBuilder_;
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the emailPreferences field is set.
*/
public boolean hasEmailPreferences() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The emailPreferences.
*/
public com.google.shopping.merchant.accounts.v1.EmailPreferences getEmailPreferences() {
if (emailPreferencesBuilder_ == null) {
return emailPreferences_ == null
? com.google.shopping.merchant.accounts.v1.EmailPreferences.getDefaultInstance()
: emailPreferences_;
} else {
return emailPreferencesBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setEmailPreferences(
com.google.shopping.merchant.accounts.v1.EmailPreferences value) {
if (emailPreferencesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
emailPreferences_ = value;
} else {
emailPreferencesBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setEmailPreferences(
com.google.shopping.merchant.accounts.v1.EmailPreferences.Builder builderForValue) {
if (emailPreferencesBuilder_ == null) {
emailPreferences_ = builderForValue.build();
} else {
emailPreferencesBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeEmailPreferences(
com.google.shopping.merchant.accounts.v1.EmailPreferences value) {
if (emailPreferencesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& emailPreferences_ != null
&& emailPreferences_
!= com.google.shopping.merchant.accounts.v1.EmailPreferences.getDefaultInstance()) {
getEmailPreferencesBuilder().mergeFrom(value);
} else {
emailPreferences_ = value;
}
} else {
emailPreferencesBuilder_.mergeFrom(value);
}
if (emailPreferences_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearEmailPreferences() {
bitField0_ = (bitField0_ & ~0x00000001);
emailPreferences_ = null;
if (emailPreferencesBuilder_ != null) {
emailPreferencesBuilder_.dispose();
emailPreferencesBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.shopping.merchant.accounts.v1.EmailPreferences.Builder
getEmailPreferencesBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getEmailPreferencesFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.shopping.merchant.accounts.v1.EmailPreferencesOrBuilder
getEmailPreferencesOrBuilder() {
if (emailPreferencesBuilder_ != null) {
return emailPreferencesBuilder_.getMessageOrBuilder();
} else {
return emailPreferences_ == null
? com.google.shopping.merchant.accounts.v1.EmailPreferences.getDefaultInstance()
: emailPreferences_;
}
}
/**
*
*
* <pre>
* Required. Email Preferences to be updated.
* </pre>
*
* <code>
* .google.shopping.merchant.accounts.v1.EmailPreferences email_preferences = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.shopping.merchant.accounts.v1.EmailPreferences,
com.google.shopping.merchant.accounts.v1.EmailPreferences.Builder,
com.google.shopping.merchant.accounts.v1.EmailPreferencesOrBuilder>
getEmailPreferencesFieldBuilder() {
if (emailPreferencesBuilder_ == null) {
emailPreferencesBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.shopping.merchant.accounts.v1.EmailPreferences,
com.google.shopping.merchant.accounts.v1.EmailPreferences.Builder,
com.google.shopping.merchant.accounts.v1.EmailPreferencesOrBuilder>(
getEmailPreferences(), getParentForChildren(), isClean());
emailPreferences_ = null;
}
return emailPreferencesBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `news_and_tips`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `news_and_tips`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `news_and_tips`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `news_and_tips`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `news_and_tips`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateMask_ != null
&& updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
getUpdateMaskBuilder().mergeFrom(value);
} else {
updateMask_ = value;
}
} else {
updateMaskBuilder_.mergeFrom(value);
}
if (updateMask_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `news_and_tips`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000002);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `news_and_tips`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `news_and_tips`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. List of fields being updated.
*
* The following fields are supported (in both `snake_case` and
* `lowerCamelCase`):
*
* - `news_and_tips`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest)
private static final com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest();
}
public static com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UpdateEmailPreferencesRequest> PARSER =
new com.google.protobuf.AbstractParser<UpdateEmailPreferencesRequest>() {
@java.lang.Override
public UpdateEmailPreferencesRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UpdateEmailPreferencesRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateEmailPreferencesRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.shopping.merchant.accounts.v1.UpdateEmailPreferencesRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,738 | java-vision/proto-google-cloud-vision-v1p3beta1/src/main/java/com/google/cloud/vision/v1p3beta1/Feature.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vision/v1p3beta1/image_annotator.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.vision.v1p3beta1;
/**
*
*
* <pre>
* The type of Google Cloud Vision API detection to perform, and the maximum
* number of results to return for that type. Multiple `Feature` objects can
* be specified in the `features` list.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1p3beta1.Feature}
*/
public final class Feature extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vision.v1p3beta1.Feature)
FeatureOrBuilder {
private static final long serialVersionUID = 0L;
// Use Feature.newBuilder() to construct.
private Feature(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private Feature() {
type_ = 0;
model_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new Feature();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
.internal_static_google_cloud_vision_v1p3beta1_Feature_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
.internal_static_google_cloud_vision_v1p3beta1_Feature_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.vision.v1p3beta1.Feature.class,
com.google.cloud.vision.v1p3beta1.Feature.Builder.class);
}
/**
*
*
* <pre>
* Type of Google Cloud Vision API feature to be extracted.
* </pre>
*
* Protobuf enum {@code google.cloud.vision.v1p3beta1.Feature.Type}
*/
public enum Type implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Unspecified feature type.
* </pre>
*
* <code>TYPE_UNSPECIFIED = 0;</code>
*/
TYPE_UNSPECIFIED(0),
/**
*
*
* <pre>
* Run face detection.
* </pre>
*
* <code>FACE_DETECTION = 1;</code>
*/
FACE_DETECTION(1),
/**
*
*
* <pre>
* Run landmark detection.
* </pre>
*
* <code>LANDMARK_DETECTION = 2;</code>
*/
LANDMARK_DETECTION(2),
/**
*
*
* <pre>
* Run logo detection.
* </pre>
*
* <code>LOGO_DETECTION = 3;</code>
*/
LOGO_DETECTION(3),
/**
*
*
* <pre>
* Run label detection.
* </pre>
*
* <code>LABEL_DETECTION = 4;</code>
*/
LABEL_DETECTION(4),
/**
*
*
* <pre>
* Run text detection / optical character recognition (OCR). Text detection
* is optimized for areas of text within a larger image; if the image is
* a document, use `DOCUMENT_TEXT_DETECTION` instead.
* </pre>
*
* <code>TEXT_DETECTION = 5;</code>
*/
TEXT_DETECTION(5),
/**
*
*
* <pre>
* Run dense text document OCR. Takes precedence when both
* `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present.
* </pre>
*
* <code>DOCUMENT_TEXT_DETECTION = 11;</code>
*/
DOCUMENT_TEXT_DETECTION(11),
/**
*
*
* <pre>
* Run Safe Search to detect potentially unsafe
* or undesirable content.
* </pre>
*
* <code>SAFE_SEARCH_DETECTION = 6;</code>
*/
SAFE_SEARCH_DETECTION(6),
/**
*
*
* <pre>
* Compute a set of image properties, such as the
* image's dominant colors.
* </pre>
*
* <code>IMAGE_PROPERTIES = 7;</code>
*/
IMAGE_PROPERTIES(7),
/**
*
*
* <pre>
* Run crop hints.
* </pre>
*
* <code>CROP_HINTS = 9;</code>
*/
CROP_HINTS(9),
/**
*
*
* <pre>
* Run web detection.
* </pre>
*
* <code>WEB_DETECTION = 10;</code>
*/
WEB_DETECTION(10),
/**
*
*
* <pre>
* Run Product Search.
* </pre>
*
* <code>PRODUCT_SEARCH = 12;</code>
*/
PRODUCT_SEARCH(12),
/**
*
*
* <pre>
* Run localizer for object detection.
* </pre>
*
* <code>OBJECT_LOCALIZATION = 19;</code>
*/
OBJECT_LOCALIZATION(19),
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Unspecified feature type.
* </pre>
*
* <code>TYPE_UNSPECIFIED = 0;</code>
*/
public static final int TYPE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* Run face detection.
* </pre>
*
* <code>FACE_DETECTION = 1;</code>
*/
public static final int FACE_DETECTION_VALUE = 1;
/**
*
*
* <pre>
* Run landmark detection.
* </pre>
*
* <code>LANDMARK_DETECTION = 2;</code>
*/
public static final int LANDMARK_DETECTION_VALUE = 2;
/**
*
*
* <pre>
* Run logo detection.
* </pre>
*
* <code>LOGO_DETECTION = 3;</code>
*/
public static final int LOGO_DETECTION_VALUE = 3;
/**
*
*
* <pre>
* Run label detection.
* </pre>
*
* <code>LABEL_DETECTION = 4;</code>
*/
public static final int LABEL_DETECTION_VALUE = 4;
/**
*
*
* <pre>
* Run text detection / optical character recognition (OCR). Text detection
* is optimized for areas of text within a larger image; if the image is
* a document, use `DOCUMENT_TEXT_DETECTION` instead.
* </pre>
*
* <code>TEXT_DETECTION = 5;</code>
*/
public static final int TEXT_DETECTION_VALUE = 5;
/**
*
*
* <pre>
* Run dense text document OCR. Takes precedence when both
* `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present.
* </pre>
*
* <code>DOCUMENT_TEXT_DETECTION = 11;</code>
*/
public static final int DOCUMENT_TEXT_DETECTION_VALUE = 11;
/**
*
*
* <pre>
* Run Safe Search to detect potentially unsafe
* or undesirable content.
* </pre>
*
* <code>SAFE_SEARCH_DETECTION = 6;</code>
*/
public static final int SAFE_SEARCH_DETECTION_VALUE = 6;
/**
*
*
* <pre>
* Compute a set of image properties, such as the
* image's dominant colors.
* </pre>
*
* <code>IMAGE_PROPERTIES = 7;</code>
*/
public static final int IMAGE_PROPERTIES_VALUE = 7;
/**
*
*
* <pre>
* Run crop hints.
* </pre>
*
* <code>CROP_HINTS = 9;</code>
*/
public static final int CROP_HINTS_VALUE = 9;
/**
*
*
* <pre>
* Run web detection.
* </pre>
*
* <code>WEB_DETECTION = 10;</code>
*/
public static final int WEB_DETECTION_VALUE = 10;
/**
*
*
* <pre>
* Run Product Search.
* </pre>
*
* <code>PRODUCT_SEARCH = 12;</code>
*/
public static final int PRODUCT_SEARCH_VALUE = 12;
/**
*
*
* <pre>
* Run localizer for object detection.
* </pre>
*
* <code>OBJECT_LOCALIZATION = 19;</code>
*/
public static final int OBJECT_LOCALIZATION_VALUE = 19;
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static Type valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
*/
public static Type forNumber(int value) {
switch (value) {
case 0:
return TYPE_UNSPECIFIED;
case 1:
return FACE_DETECTION;
case 2:
return LANDMARK_DETECTION;
case 3:
return LOGO_DETECTION;
case 4:
return LABEL_DETECTION;
case 5:
return TEXT_DETECTION;
case 11:
return DOCUMENT_TEXT_DETECTION;
case 6:
return SAFE_SEARCH_DETECTION;
case 7:
return IMAGE_PROPERTIES;
case 9:
return CROP_HINTS;
case 10:
return WEB_DETECTION;
case 12:
return PRODUCT_SEARCH;
case 19:
return OBJECT_LOCALIZATION;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<Type> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<Type> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<Type>() {
public Type findValueByNumber(int number) {
return Type.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.vision.v1p3beta1.Feature.getDescriptor().getEnumTypes().get(0);
}
private static final Type[] VALUES = values();
public static Type valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private Type(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.vision.v1p3beta1.Feature.Type)
}
public static final int TYPE_FIELD_NUMBER = 1;
private int type_ = 0;
/**
*
*
* <pre>
* The feature type.
* </pre>
*
* <code>.google.cloud.vision.v1p3beta1.Feature.Type type = 1;</code>
*
* @return The enum numeric value on the wire for type.
*/
@java.lang.Override
public int getTypeValue() {
return type_;
}
/**
*
*
* <pre>
* The feature type.
* </pre>
*
* <code>.google.cloud.vision.v1p3beta1.Feature.Type type = 1;</code>
*
* @return The type.
*/
@java.lang.Override
public com.google.cloud.vision.v1p3beta1.Feature.Type getType() {
com.google.cloud.vision.v1p3beta1.Feature.Type result =
com.google.cloud.vision.v1p3beta1.Feature.Type.forNumber(type_);
return result == null ? com.google.cloud.vision.v1p3beta1.Feature.Type.UNRECOGNIZED : result;
}
public static final int MAX_RESULTS_FIELD_NUMBER = 2;
private int maxResults_ = 0;
/**
*
*
* <pre>
* Maximum number of results of this type. Does not apply to
* `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
* </pre>
*
* <code>int32 max_results = 2;</code>
*
* @return The maxResults.
*/
@java.lang.Override
public int getMaxResults() {
return maxResults_;
}
public static final int MODEL_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object model_ = "";
/**
*
*
* <pre>
* Model to use for the feature.
* Supported values: "builtin/stable" (the default if unset) and
* "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
* support "builtin/weekly" for the bleeding edge release updated weekly.
* </pre>
*
* <code>string model = 3;</code>
*
* @return The model.
*/
@java.lang.Override
public java.lang.String getModel() {
java.lang.Object ref = model_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
model_ = s;
return s;
}
}
/**
*
*
* <pre>
* Model to use for the feature.
* Supported values: "builtin/stable" (the default if unset) and
* "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
* support "builtin/weekly" for the bleeding edge release updated weekly.
* </pre>
*
* <code>string model = 3;</code>
*
* @return The bytes for model.
*/
@java.lang.Override
public com.google.protobuf.ByteString getModelBytes() {
java.lang.Object ref = model_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
model_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (type_ != com.google.cloud.vision.v1p3beta1.Feature.Type.TYPE_UNSPECIFIED.getNumber()) {
output.writeEnum(1, type_);
}
if (maxResults_ != 0) {
output.writeInt32(2, maxResults_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, model_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (type_ != com.google.cloud.vision.v1p3beta1.Feature.Type.TYPE_UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, type_);
}
if (maxResults_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, maxResults_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, model_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.vision.v1p3beta1.Feature)) {
return super.equals(obj);
}
com.google.cloud.vision.v1p3beta1.Feature other =
(com.google.cloud.vision.v1p3beta1.Feature) obj;
if (type_ != other.type_) return false;
if (getMaxResults() != other.getMaxResults()) return false;
if (!getModel().equals(other.getModel())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + TYPE_FIELD_NUMBER;
hash = (53 * hash) + type_;
hash = (37 * hash) + MAX_RESULTS_FIELD_NUMBER;
hash = (53 * hash) + getMaxResults();
hash = (37 * hash) + MODEL_FIELD_NUMBER;
hash = (53 * hash) + getModel().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.vision.v1p3beta1.Feature parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.vision.v1p3beta1.Feature parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.vision.v1p3beta1.Feature parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
  // ---------------------------------------------------------------------------
  // Static parse entry points. In-memory inputs (ByteString, byte[]) delegate
  // directly to PARSER; stream-backed inputs go through the GeneratedMessageV3
  // helpers, which translate parser failures into IOException-compatible types.
  // ---------------------------------------------------------------------------

  // Parses a Feature from a ByteString, resolving any extensions via the registry.
  public static com.google.cloud.vision.v1p3beta1.Feature parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  // Parses a Feature from a raw byte array.
  public static com.google.cloud.vision.v1p3beta1.Feature parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  // Parses a Feature from a raw byte array, resolving extensions via the registry.
  public static com.google.cloud.vision.v1p3beta1.Feature parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  // Parses a single (non-delimited) Feature that occupies the whole stream.
  public static com.google.cloud.vision.v1p3beta1.Feature parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.vision.v1p3beta1.Feature parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Parses a length-prefixed ("delimited") Feature, leaving the stream positioned
  // after the message so multiple messages can be read back to back.
  public static com.google.cloud.vision.v1p3beta1.Feature parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.vision.v1p3beta1.Feature parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Parses a Feature from an already-open CodedInputStream.
  public static com.google.cloud.vision.v1p3beta1.Feature parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.vision.v1p3beta1.Feature parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. All builders are ultimately derived from
  // DEFAULT_INSTANCE so that field defaults come from a single canonical object.

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Returns a fresh Builder with all fields at their proto3 defaults.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Returns a Builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(com.google.cloud.vision.v1p3beta1.Feature prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder without a redundant merge.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The type of Google Cloud Vision API detection to perform, and the maximum
   * number of results to return for that type. Multiple `Feature` objects can
   * be specified in the `features` list.
   * </pre>
   *
   * Protobuf type {@code google.cloud.vision.v1p3beta1.Feature}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p3beta1.Feature)
      com.google.cloud.vision.v1p3beta1.FeatureOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p3beta1_Feature_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p3beta1_Feature_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.vision.v1p3beta1.Feature.class,
              com.google.cloud.vision.v1p3beta1.Feature.Builder.class);
    }

    // Construct using com.google.cloud.vision.v1p3beta1.Feature.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset the has-bits and every field to its proto3 default.
      bitField0_ = 0;
      type_ = 0;
      maxResults_ = 0;
      model_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.vision.v1p3beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p3beta1_Feature_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.vision.v1p3beta1.Feature getDefaultInstanceForType() {
      return com.google.cloud.vision.v1p3beta1.Feature.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.vision.v1p3beta1.Feature build() {
      com.google.cloud.vision.v1p3beta1.Feature result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.vision.v1p3beta1.Feature buildPartial() {
      com.google.cloud.vision.v1p3beta1.Feature result =
          new com.google.cloud.vision.v1p3beta1.Feature(this);
      // Only copy fields if at least one was explicitly set on this builder.
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies each field marked in bitField0_ from the builder into the message.
    private void buildPartial0(com.google.cloud.vision.v1p3beta1.Feature result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.type_ = type_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.maxResults_ = maxResults_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.model_ = model_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Use the typed fast path when the other message is a Feature; otherwise
      // fall back to the reflective descriptor-based merge.
      if (other instanceof com.google.cloud.vision.v1p3beta1.Feature) {
        return mergeFrom((com.google.cloud.vision.v1p3beta1.Feature) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Typed merge: copies only fields that are set to a non-default value in
    // `other`, matching proto3 merge semantics.
    public Builder mergeFrom(com.google.cloud.vision.v1p3beta1.Feature other) {
      if (other == com.google.cloud.vision.v1p3beta1.Feature.getDefaultInstance()) return this;
      if (other.type_ != 0) {
        setTypeValue(other.getTypeValue());
      }
      if (other.getMaxResults() != 0) {
        setMaxResults(other.getMaxResults());
      }
      if (!other.getModel().isEmpty()) {
        model_ = other.model_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // Proto3 message with no required fields: always initialized.
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        // Wire-format parse loop: dispatch on each tag (field number << 3 | wire type).
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                type_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 16:
              {
                maxResults_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                model_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Fields set before a failure are kept; notify listeners either way.
        onChanged();
      } // finally
      return this;
    }

    // Bit i of bitField0_ records whether the i-th field was explicitly set.
    private int bitField0_;

    private int type_ = 0;
    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p3beta1.Feature.Type type = 1;</code>
     *
     * @return The enum numeric value on the wire for type.
     */
    @java.lang.Override
    public int getTypeValue() {
      return type_;
    }

    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p3beta1.Feature.Type type = 1;</code>
     *
     * @param value The enum numeric value on the wire for type to set.
     * @return This builder for chaining.
     */
    public Builder setTypeValue(int value) {
      type_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p3beta1.Feature.Type type = 1;</code>
     *
     * @return The type.
     */
    @java.lang.Override
    public com.google.cloud.vision.v1p3beta1.Feature.Type getType() {
      // Unknown wire values map to UNRECOGNIZED rather than null.
      com.google.cloud.vision.v1p3beta1.Feature.Type result =
          com.google.cloud.vision.v1p3beta1.Feature.Type.forNumber(type_);
      return result == null ? com.google.cloud.vision.v1p3beta1.Feature.Type.UNRECOGNIZED : result;
    }

    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p3beta1.Feature.Type type = 1;</code>
     *
     * @param value The type to set.
     * @return This builder for chaining.
     */
    public Builder setType(com.google.cloud.vision.v1p3beta1.Feature.Type value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      type_ = value.getNumber();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p3beta1.Feature.Type type = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearType() {
      bitField0_ = (bitField0_ & ~0x00000001);
      type_ = 0;
      onChanged();
      return this;
    }

    private int maxResults_;
    /**
     *
     *
     * <pre>
     * Maximum number of results of this type. Does not apply to
     * `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
     * </pre>
     *
     * <code>int32 max_results = 2;</code>
     *
     * @return The maxResults.
     */
    @java.lang.Override
    public int getMaxResults() {
      return maxResults_;
    }

    /**
     *
     *
     * <pre>
     * Maximum number of results of this type. Does not apply to
     * `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
     * </pre>
     *
     * <code>int32 max_results = 2;</code>
     *
     * @param value The maxResults to set.
     * @return This builder for chaining.
     */
    public Builder setMaxResults(int value) {
      maxResults_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Maximum number of results of this type. Does not apply to
     * `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
     * </pre>
     *
     * <code>int32 max_results = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMaxResults() {
      bitField0_ = (bitField0_ & ~0x00000002);
      maxResults_ = 0;
      onChanged();
      return this;
    }

    // Holds either a String or a ByteString; decoded lazily on first access.
    private java.lang.Object model_ = "";
    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @return The model.
     */
    public java.lang.String getModel() {
      java.lang.Object ref = model_;
      if (!(ref instanceof java.lang.String)) {
        // Decode the cached ByteString once and memoize the String form.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        model_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @return The bytes for model.
     */
    public com.google.protobuf.ByteString getModelBytes() {
      java.lang.Object ref = model_;
      if (ref instanceof String) {
        // Encode the cached String once and memoize the ByteString form.
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        model_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @param value The model to set.
     * @return This builder for chaining.
     */
    public Builder setModel(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      model_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearModel() {
      model_ = getDefaultInstance().getModel();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @param value The bytes for model to set.
     * @return This builder for chaining.
     */
    public Builder setModelBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Reject bytes that are not valid UTF-8 (proto3 string contract).
      checkByteStringIsUtf8(value);
      model_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p3beta1.Feature)
  }
// @@protoc_insertion_point(class_scope:google.cloud.vision.v1p3beta1.Feature)
  // Canonical default instance; all builders and getDefaultInstance() share it.
  private static final com.google.cloud.vision.v1p3beta1.Feature DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.vision.v1p3beta1.Feature();
  }

  public static com.google.cloud.vision.v1p3beta1.Feature getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Singleton parser used by all static parseFrom overloads. A partial parse
  // failure attaches whatever was decoded so far via setUnfinishedMessage.
  private static final com.google.protobuf.Parser<Feature> PARSER =
      new com.google.protobuf.AbstractParser<Feature>() {
        @java.lang.Override
        public Feature parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain IO failures so callers see one checked exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<Feature> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<Feature> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.vision.v1p3beta1.Feature getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 36,738 | java-vision/proto-google-cloud-vision-v1p4beta1/src/main/java/com/google/cloud/vision/v1p4beta1/Feature.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/vision/v1p4beta1/image_annotator.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.vision.v1p4beta1;
/**
*
*
* <pre>
* The type of Google Cloud Vision API detection to perform, and the maximum
* number of results to return for that type. Multiple `Feature` objects can
* be specified in the `features` list.
* </pre>
*
* Protobuf type {@code google.cloud.vision.v1p4beta1.Feature}
*/
public final class Feature extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.vision.v1p4beta1.Feature)
FeatureOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use Feature.newBuilder() to construct.
  private Feature(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only for DEFAULT_INSTANCE and deserialization;
  // initializes fields to their proto3 defaults.
  private Feature() {
    type_ = 0;
    model_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // Runtime hook used by the protobuf library to allocate fresh instances.
    return new Feature();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
        .internal_static_google_cloud_vision_v1p4beta1_Feature_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    // Binds the descriptor's fields to this class's accessors for reflection.
    return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
        .internal_static_google_cloud_vision_v1p4beta1_Feature_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.vision.v1p4beta1.Feature.class,
            com.google.cloud.vision.v1p4beta1.Feature.Builder.class);
  }
  /**
   *
   *
   * <pre>
   * Type of Google Cloud Vision API feature to be extracted.
   * </pre>
   *
   * Protobuf enum {@code google.cloud.vision.v1p4beta1.Feature.Type}
   */
  public enum Type implements com.google.protobuf.ProtocolMessageEnum {
    /**
     *
     *
     * <pre>
     * Unspecified feature type.
     * </pre>
     *
     * <code>TYPE_UNSPECIFIED = 0;</code>
     */
    TYPE_UNSPECIFIED(0),
    /**
     *
     *
     * <pre>
     * Run face detection.
     * </pre>
     *
     * <code>FACE_DETECTION = 1;</code>
     */
    FACE_DETECTION(1),
    /**
     *
     *
     * <pre>
     * Run landmark detection.
     * </pre>
     *
     * <code>LANDMARK_DETECTION = 2;</code>
     */
    LANDMARK_DETECTION(2),
    /**
     *
     *
     * <pre>
     * Run logo detection.
     * </pre>
     *
     * <code>LOGO_DETECTION = 3;</code>
     */
    LOGO_DETECTION(3),
    /**
     *
     *
     * <pre>
     * Run label detection.
     * </pre>
     *
     * <code>LABEL_DETECTION = 4;</code>
     */
    LABEL_DETECTION(4),
    /**
     *
     *
     * <pre>
     * Run text detection / optical character recognition (OCR). Text detection
     * is optimized for areas of text within a larger image; if the image is
     * a document, use `DOCUMENT_TEXT_DETECTION` instead.
     * </pre>
     *
     * <code>TEXT_DETECTION = 5;</code>
     */
    TEXT_DETECTION(5),
    /**
     *
     *
     * <pre>
     * Run dense text document OCR. Takes precedence when both
     * `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present.
     * </pre>
     *
     * <code>DOCUMENT_TEXT_DETECTION = 11;</code>
     */
    DOCUMENT_TEXT_DETECTION(11),
    /**
     *
     *
     * <pre>
     * Run Safe Search to detect potentially unsafe
     * or undesirable content.
     * </pre>
     *
     * <code>SAFE_SEARCH_DETECTION = 6;</code>
     */
    SAFE_SEARCH_DETECTION(6),
    /**
     *
     *
     * <pre>
     * Compute a set of image properties, such as the
     * image's dominant colors.
     * </pre>
     *
     * <code>IMAGE_PROPERTIES = 7;</code>
     */
    IMAGE_PROPERTIES(7),
    /**
     *
     *
     * <pre>
     * Run crop hints.
     * </pre>
     *
     * <code>CROP_HINTS = 9;</code>
     */
    CROP_HINTS(9),
    /**
     *
     *
     * <pre>
     * Run web detection.
     * </pre>
     *
     * <code>WEB_DETECTION = 10;</code>
     */
    WEB_DETECTION(10),
    /**
     *
     *
     * <pre>
     * Run Product Search.
     * </pre>
     *
     * <code>PRODUCT_SEARCH = 12;</code>
     */
    PRODUCT_SEARCH(12),
    /**
     *
     *
     * <pre>
     * Run localizer for object detection.
     * </pre>
     *
     * <code>OBJECT_LOCALIZATION = 19;</code>
     */
    OBJECT_LOCALIZATION(19),
    // Sentinel for wire values this generated code does not know about
    // (e.g. produced by a newer version of the proto); has no wire number.
    UNRECOGNIZED(-1),
    ;

    /**
     *
     *
     * <pre>
     * Unspecified feature type.
     * </pre>
     *
     * <code>TYPE_UNSPECIFIED = 0;</code>
     */
    public static final int TYPE_UNSPECIFIED_VALUE = 0;
    /**
     *
     *
     * <pre>
     * Run face detection.
     * </pre>
     *
     * <code>FACE_DETECTION = 1;</code>
     */
    public static final int FACE_DETECTION_VALUE = 1;
    /**
     *
     *
     * <pre>
     * Run landmark detection.
     * </pre>
     *
     * <code>LANDMARK_DETECTION = 2;</code>
     */
    public static final int LANDMARK_DETECTION_VALUE = 2;
    /**
     *
     *
     * <pre>
     * Run logo detection.
     * </pre>
     *
     * <code>LOGO_DETECTION = 3;</code>
     */
    public static final int LOGO_DETECTION_VALUE = 3;
    /**
     *
     *
     * <pre>
     * Run label detection.
     * </pre>
     *
     * <code>LABEL_DETECTION = 4;</code>
     */
    public static final int LABEL_DETECTION_VALUE = 4;
    /**
     *
     *
     * <pre>
     * Run text detection / optical character recognition (OCR). Text detection
     * is optimized for areas of text within a larger image; if the image is
     * a document, use `DOCUMENT_TEXT_DETECTION` instead.
     * </pre>
     *
     * <code>TEXT_DETECTION = 5;</code>
     */
    public static final int TEXT_DETECTION_VALUE = 5;
    /**
     *
     *
     * <pre>
     * Run dense text document OCR. Takes precedence when both
     * `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` are present.
     * </pre>
     *
     * <code>DOCUMENT_TEXT_DETECTION = 11;</code>
     */
    public static final int DOCUMENT_TEXT_DETECTION_VALUE = 11;
    /**
     *
     *
     * <pre>
     * Run Safe Search to detect potentially unsafe
     * or undesirable content.
     * </pre>
     *
     * <code>SAFE_SEARCH_DETECTION = 6;</code>
     */
    public static final int SAFE_SEARCH_DETECTION_VALUE = 6;
    /**
     *
     *
     * <pre>
     * Compute a set of image properties, such as the
     * image's dominant colors.
     * </pre>
     *
     * <code>IMAGE_PROPERTIES = 7;</code>
     */
    public static final int IMAGE_PROPERTIES_VALUE = 7;
    /**
     *
     *
     * <pre>
     * Run crop hints.
     * </pre>
     *
     * <code>CROP_HINTS = 9;</code>
     */
    public static final int CROP_HINTS_VALUE = 9;
    /**
     *
     *
     * <pre>
     * Run web detection.
     * </pre>
     *
     * <code>WEB_DETECTION = 10;</code>
     */
    public static final int WEB_DETECTION_VALUE = 10;
    /**
     *
     *
     * <pre>
     * Run Product Search.
     * </pre>
     *
     * <code>PRODUCT_SEARCH = 12;</code>
     */
    public static final int PRODUCT_SEARCH_VALUE = 12;
    /**
     *
     *
     * <pre>
     * Run localizer for object detection.
     * </pre>
     *
     * <code>OBJECT_LOCALIZATION = 19;</code>
     */
    public static final int OBJECT_LOCALIZATION_VALUE = 19;

    public final int getNumber() {
      // UNRECOGNIZED carries no wire number, so asking for one is an error.
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalArgumentException(
            "Can't get the number of an unknown enum value.");
      }
      return value;
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     * @deprecated Use {@link #forNumber(int)} instead.
     */
    @java.lang.Deprecated
    public static Type valueOf(int value) {
      return forNumber(value);
    }

    /**
     * @param value The numeric wire value of the corresponding enum entry.
     * @return The enum associated with the given numeric wire value.
     */
    public static Type forNumber(int value) {
      switch (value) {
        case 0:
          return TYPE_UNSPECIFIED;
        case 1:
          return FACE_DETECTION;
        case 2:
          return LANDMARK_DETECTION;
        case 3:
          return LOGO_DETECTION;
        case 4:
          return LABEL_DETECTION;
        case 5:
          return TEXT_DETECTION;
        case 11:
          return DOCUMENT_TEXT_DETECTION;
        case 6:
          return SAFE_SEARCH_DETECTION;
        case 7:
          return IMAGE_PROPERTIES;
        case 9:
          return CROP_HINTS;
        case 10:
          return WEB_DETECTION;
        case 12:
          return PRODUCT_SEARCH;
        case 19:
          return OBJECT_LOCALIZATION;
        default:
          // Unknown wire value: caller maps null to UNRECOGNIZED.
          return null;
      }
    }

    public static com.google.protobuf.Internal.EnumLiteMap<Type> internalGetValueMap() {
      return internalValueMap;
    }

    private static final com.google.protobuf.Internal.EnumLiteMap<Type> internalValueMap =
        new com.google.protobuf.Internal.EnumLiteMap<Type>() {
          public Type findValueByNumber(int number) {
            return Type.forNumber(number);
          }
        };

    public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
      if (this == UNRECOGNIZED) {
        throw new java.lang.IllegalStateException(
            "Can't get the descriptor of an unrecognized enum value.");
      }
      return getDescriptor().getValues().get(ordinal());
    }

    public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
      return getDescriptor();
    }

    public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
      return com.google.cloud.vision.v1p4beta1.Feature.getDescriptor().getEnumTypes().get(0);
    }

    // Cached values() array to avoid re-cloning on every descriptor lookup.
    private static final Type[] VALUES = values();

    public static Type valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
      if (desc.getType() != getDescriptor()) {
        throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
      }
      if (desc.getIndex() == -1) {
        return UNRECOGNIZED;
      }
      return VALUES[desc.getIndex()];
    }

    // The proto wire number for this constant (-1 for UNRECOGNIZED).
    private final int value;

    private Type(int value) {
      this.value = value;
    }

    // @@protoc_insertion_point(enum_scope:google.cloud.vision.v1p4beta1.Feature.Type)
  }
  public static final int TYPE_FIELD_NUMBER = 1;
  // Stored as the raw wire value so unknown enum numbers survive round-trips.
  private int type_ = 0;
  /**
   *
   *
   * <pre>
   * The feature type.
   * </pre>
   *
   * <code>.google.cloud.vision.v1p4beta1.Feature.Type type = 1;</code>
   *
   * @return The enum numeric value on the wire for type.
   */
  @java.lang.Override
  public int getTypeValue() {
    return type_;
  }

  /**
   *
   *
   * <pre>
   * The feature type.
   * </pre>
   *
   * <code>.google.cloud.vision.v1p4beta1.Feature.Type type = 1;</code>
   *
   * @return The type.
   */
  @java.lang.Override
  public com.google.cloud.vision.v1p4beta1.Feature.Type getType() {
    // Unknown wire values map to UNRECOGNIZED rather than null.
    com.google.cloud.vision.v1p4beta1.Feature.Type result =
        com.google.cloud.vision.v1p4beta1.Feature.Type.forNumber(type_);
    return result == null ? com.google.cloud.vision.v1p4beta1.Feature.Type.UNRECOGNIZED : result;
  }

  public static final int MAX_RESULTS_FIELD_NUMBER = 2;
  private int maxResults_ = 0;
  /**
   *
   *
   * <pre>
   * Maximum number of results of this type. Does not apply to
   * `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
   * </pre>
   *
   * <code>int32 max_results = 2;</code>
   *
   * @return The maxResults.
   */
  @java.lang.Override
  public int getMaxResults() {
    return maxResults_;
  }
  public static final int MODEL_FIELD_NUMBER = 3;

  // Holds either a String or a ByteString; converted lazily and memoized,
  // hence volatile for safe publication across threads.
  @SuppressWarnings("serial")
  private volatile java.lang.Object model_ = "";
  /**
   *
   *
   * <pre>
   * Model to use for the feature.
   * Supported values: "builtin/stable" (the default if unset) and
   * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
   * support "builtin/weekly" for the bleeding edge release updated weekly.
   * </pre>
   *
   * <code>string model = 3;</code>
   *
   * @return The model.
   */
  @java.lang.Override
  public java.lang.String getModel() {
    java.lang.Object ref = model_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the cached ByteString once and memoize the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      model_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Model to use for the feature.
   * Supported values: "builtin/stable" (the default if unset) and
   * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
   * support "builtin/weekly" for the bleeding edge release updated weekly.
   * </pre>
   *
   * <code>string model = 3;</code>
   *
   * @return The bytes for model.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getModelBytes() {
    java.lang.Object ref = model_;
    if (ref instanceof java.lang.String) {
      // Encode the cached String once and memoize the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      model_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not yet computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // Proto3 message with no required fields: always initialized; memoize.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 wire format: fields equal to their default value are omitted.
    if (type_ != com.google.cloud.vision.v1p4beta1.Feature.Type.TYPE_UNSPECIFIED.getNumber()) {
      output.writeEnum(1, type_);
    }
    if (maxResults_ != 0) {
      output.writeInt32(2, maxResults_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, model_);
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // Memoized: -1 means not yet computed. Must mirror writeTo exactly.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (type_ != com.google.cloud.vision.v1p4beta1.Feature.Type.TYPE_UNSPECIFIED.getNumber()) {
      size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, type_);
    }
    if (maxResults_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, maxResults_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(model_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, model_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.vision.v1p4beta1.Feature)) {
      return super.equals(obj);
    }
    com.google.cloud.vision.v1p4beta1.Feature other =
        (com.google.cloud.vision.v1p4beta1.Feature) obj;

    // Field-by-field comparison, including unknown fields so that messages
    // parsed from different wire payloads are not spuriously equal.
    if (type_ != other.type_) return false;
    if (getMaxResults() != other.getMaxResults()) return false;
    if (!getModel().equals(other.getModel())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Standard generated-code hash: fold in each field number and value.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + TYPE_FIELD_NUMBER;
    hash = (53 * hash) + type_;
    hash = (37 * hash) + MAX_RESULTS_FIELD_NUMBER;
    hash = (53 * hash) + getMaxResults();
    hash = (37 * hash) + MODEL_FIELD_NUMBER;
    hash = (53 * hash) + getModel().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Static parse entry points. In-memory inputs delegate directly to PARSER;
  // stream-backed inputs go through the GeneratedMessageV3 helpers.
  // ---------------------------------------------------------------------------

  public static com.google.cloud.vision.v1p4beta1.Feature parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.vision.v1p4beta1.Feature parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p4beta1.Feature parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.vision.v1p4beta1.Feature parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p4beta1.Feature parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.vision.v1p4beta1.Feature parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p4beta1.Feature parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.vision.v1p4beta1.Feature parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Length-prefixed ("delimited") variants leave the stream positioned after
  // the message so multiple messages can be read back to back.
  public static com.google.cloud.vision.v1p4beta1.Feature parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.vision.v1p4beta1.Feature parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.vision.v1p4beta1.Feature parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.vision.v1p4beta1.Feature parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factory methods. All builders are ultimately derived from
  // DEFAULT_INSTANCE so that field defaults come from a single canonical object.

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Returns a fresh Builder with all fields at their proto3 defaults.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Returns a Builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(com.google.cloud.vision.v1p4beta1.Feature prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields an empty builder without a redundant merge.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * The type of Google Cloud Vision API detection to perform, and the maximum
   * number of results to return for that type. Multiple `Feature` objects can
   * be specified in the `features` list.
   * </pre>
   *
   * Protobuf type {@code google.cloud.vision.v1p4beta1.Feature}
   */
  // NOTE(review): protoc-generated builder — do not hand-edit logic; regenerate
  // from the .proto instead. Comments below only explain the generated pattern.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.vision.v1p4beta1.Feature)
      com.google.cloud.vision.v1p4beta1.FeatureOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p4beta1_Feature_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p4beta1_Feature_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.vision.v1p4beta1.Feature.class,
              com.google.cloud.vision.v1p4beta1.Feature.Builder.class);
    }

    // Construct using com.google.cloud.vision.v1p4beta1.Feature.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    /** Resets every field to its proto3 default and clears all set-bits. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      type_ = 0;
      maxResults_ = 0;
      model_ = "";
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.vision.v1p4beta1.ImageAnnotatorProto
          .internal_static_google_cloud_vision_v1p4beta1_Feature_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.vision.v1p4beta1.Feature getDefaultInstanceForType() {
      return com.google.cloud.vision.v1p4beta1.Feature.getDefaultInstance();
    }

    /** Builds the message, throwing if required invariants are unmet. */
    @java.lang.Override
    public com.google.cloud.vision.v1p4beta1.Feature build() {
      com.google.cloud.vision.v1p4beta1.Feature result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /** Builds the message without the initialization check. */
    @java.lang.Override
    public com.google.cloud.vision.v1p4beta1.Feature buildPartial() {
      com.google.cloud.vision.v1p4beta1.Feature result =
          new com.google.cloud.vision.v1p4beta1.Feature(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only the fields whose set-bit is on into the freshly built message.
    private void buildPartial0(com.google.cloud.vision.v1p4beta1.Feature result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.type_ = type_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.maxResults_ = maxResults_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.model_ = model_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.vision.v1p4beta1.Feature) {
        return mergeFrom((com.google.cloud.vision.v1p4beta1.Feature) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge: only fields of `other` that differ from their default
    // overwrite this builder's values.
    public Builder mergeFrom(com.google.cloud.vision.v1p4beta1.Feature other) {
      if (other == com.google.cloud.vision.v1p4beta1.Feature.getDefaultInstance()) return this;
      if (other.type_ != 0) {
        setTypeValue(other.getTypeValue());
      }
      if (other.getMaxResults() != 0) {
        setMaxResults(other.getMaxResults());
      }
      if (!other.getModel().isEmpty()) {
        model_ = other.model_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop: dispatches on each field tag, preserving
    // unrecognized fields via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 8:
              {
                type_ = input.readEnum();
                bitField0_ |= 0x00000001;
                break;
              } // case 8
            case 16:
              {
                maxResults_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                model_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    // Set-bit flags: 0x1 = type, 0x2 = maxResults, 0x4 = model (see buildPartial0).
    private int bitField0_;

    private int type_ = 0;
    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p4beta1.Feature.Type type = 1;</code>
     *
     * @return The enum numeric value on the wire for type.
     */
    @java.lang.Override
    public int getTypeValue() {
      return type_;
    }

    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p4beta1.Feature.Type type = 1;</code>
     *
     * @param value The enum numeric value on the wire for type to set.
     * @return This builder for chaining.
     */
    public Builder setTypeValue(int value) {
      type_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p4beta1.Feature.Type type = 1;</code>
     *
     * @return The type.
     */
    @java.lang.Override
    public com.google.cloud.vision.v1p4beta1.Feature.Type getType() {
      // Unknown wire values map to UNRECOGNIZED rather than null.
      com.google.cloud.vision.v1p4beta1.Feature.Type result =
          com.google.cloud.vision.v1p4beta1.Feature.Type.forNumber(type_);
      return result == null ? com.google.cloud.vision.v1p4beta1.Feature.Type.UNRECOGNIZED : result;
    }

    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p4beta1.Feature.Type type = 1;</code>
     *
     * @param value The type to set.
     * @return This builder for chaining.
     */
    public Builder setType(com.google.cloud.vision.v1p4beta1.Feature.Type value) {
      if (value == null) {
        throw new NullPointerException();
      }
      bitField0_ |= 0x00000001;
      type_ = value.getNumber();
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The feature type.
     * </pre>
     *
     * <code>.google.cloud.vision.v1p4beta1.Feature.Type type = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearType() {
      bitField0_ = (bitField0_ & ~0x00000001);
      type_ = 0;
      onChanged();
      return this;
    }

    private int maxResults_;
    /**
     *
     *
     * <pre>
     * Maximum number of results of this type. Does not apply to
     * `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
     * </pre>
     *
     * <code>int32 max_results = 2;</code>
     *
     * @return The maxResults.
     */
    @java.lang.Override
    public int getMaxResults() {
      return maxResults_;
    }

    /**
     *
     *
     * <pre>
     * Maximum number of results of this type. Does not apply to
     * `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
     * </pre>
     *
     * <code>int32 max_results = 2;</code>
     *
     * @param value The maxResults to set.
     * @return This builder for chaining.
     */
    public Builder setMaxResults(int value) {
      maxResults_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Maximum number of results of this type. Does not apply to
     * `TEXT_DETECTION`, `DOCUMENT_TEXT_DETECTION`, or `CROP_HINTS`.
     * </pre>
     *
     * <code>int32 max_results = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMaxResults() {
      bitField0_ = (bitField0_ & ~0x00000002);
      maxResults_ = 0;
      onChanged();
      return this;
    }

    // Holds either a String or a ByteString; each accessor caches back its
    // preferred representation (standard generated lazy-conversion pattern).
    private java.lang.Object model_ = "";
    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @return The model.
     */
    public java.lang.String getModel() {
      java.lang.Object ref = model_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        model_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @return The bytes for model.
     */
    public com.google.protobuf.ByteString getModelBytes() {
      java.lang.Object ref = model_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        model_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @param value The model to set.
     * @return This builder for chaining.
     */
    public Builder setModel(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      model_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearModel() {
      model_ = getDefaultInstance().getModel();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Model to use for the feature.
     * Supported values: "builtin/stable" (the default if unset) and
     * "builtin/latest". `DOCUMENT_TEXT_DETECTION` and `TEXT_DETECTION` also
     * support "builtin/weekly" for the bleeding edge release updated weekly.
     * </pre>
     *
     * <code>string model = 3;</code>
     *
     * @param value The bytes for model to set.
     * @return This builder for chaining.
     */
    public Builder setModelBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      model_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.vision.v1p4beta1.Feature)
  }
// @@protoc_insertion_point(class_scope:google.cloud.vision.v1p4beta1.Feature)
  // Singleton default instance, created once in the static initializer below.
  private static final com.google.cloud.vision.v1p4beta1.Feature DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.vision.v1p4beta1.Feature();
  }

  /** Returns the shared instance with every field at its proto3 default. */
  public static com.google.cloud.vision.v1p4beta1.Feature getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  /** Shared parser: deserializes each {@code Feature} through a fresh Builder. */
  private static final com.google.protobuf.Parser<Feature> PARSER =
      new com.google.protobuf.AbstractParser<Feature>() {
        @java.lang.Override
        public Feature parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially parsed message so callers can inspect it.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            // Wrap plain I/O failures in the protobuf-specific exception type.
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  /** Returns the shared parser for {@code Feature} messages. */
  public static com.google.protobuf.Parser<Feature> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<Feature> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.vision.v1p4beta1.Feature getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== Concatenated next file: java-retail/proto-google-cloud-retail-v2/src/main/java/com/google/cloud/retail/v2/Catalog.java (repo: googleapis/google-cloud-java) ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/retail/v2/catalog.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.retail.v2;
/**
*
*
* <pre>
* The catalog configuration.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.Catalog}
*/
public final class Catalog extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.retail.v2.Catalog)
CatalogOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use Catalog.newBuilder() to construct.
  private Catalog(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  /** No-arg constructor used for the default instance; string fields start empty. */
  private Catalog() {
    name_ = "";
    displayName_ = "";
  }

  // Instantiation hook invoked by the protobuf runtime.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new Catalog();
  }
  /** Returns the message descriptor generated from {@code google/cloud/retail/v2/catalog.proto}. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.retail.v2.CatalogProto
        .internal_static_google_cloud_retail_v2_Catalog_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.retail.v2.CatalogProto
        .internal_static_google_cloud_retail_v2_Catalog_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.retail.v2.Catalog.class,
            com.google.cloud.retail.v2.Catalog.Builder.class);
  }
  // Has-bits for message-typed fields: bit 0 = productLevelConfig.
  private int bitField0_;
  public static final int NAME_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; each accessor caches back its
  // preferred representation (standard generated lazy-conversion pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object name_ = "";

  /**
   *
   *
   * <pre>
   * Required. Immutable. The fully qualified resource name of the catalog.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
   * </code>
   *
   * @return The name.
   */
  @java.lang.Override
  public java.lang.String getName() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      name_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Immutable. The fully qualified resource name of the catalog.
   * </pre>
   *
   * <code>
   * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
   * </code>
   *
   * @return The bytes for name.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNameBytes() {
    java.lang.Object ref = name_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      name_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int DISPLAY_NAME_FIELD_NUMBER = 2;

  // Same lazy String/ByteString caching scheme as name_.
  @SuppressWarnings("serial")
  private volatile java.lang.Object displayName_ = "";

  /**
   *
   *
   * <pre>
   * Required. Immutable. The catalog display name.
   *
   * This field must be a UTF-8 encoded string with a length limit of 128
   * characters. Otherwise, an INVALID_ARGUMENT error is returned.
   * </pre>
   *
   * <code>
   * string display_name = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
   * </code>
   *
   * @return The displayName.
   */
  @java.lang.Override
  public java.lang.String getDisplayName() {
    java.lang.Object ref = displayName_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      displayName_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * Required. Immutable. The catalog display name.
   *
   * This field must be a UTF-8 encoded string with a length limit of 128
   * characters. Otherwise, an INVALID_ARGUMENT error is returned.
   * </pre>
   *
   * <code>
   * string display_name = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
   * </code>
   *
   * @return The bytes for displayName.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getDisplayNameBytes() {
    java.lang.Object ref = displayName_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      displayName_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int PRODUCT_LEVEL_CONFIG_FIELD_NUMBER = 4;

  // null until set; presence is tracked separately by bit 0 of bitField0_.
  private com.google.cloud.retail.v2.ProductLevelConfig productLevelConfig_;

  /**
   *
   *
   * <pre>
   * Required. The product level configuration.
   * </pre>
   *
   * <code>
   * .google.cloud.retail.v2.ProductLevelConfig product_level_config = 4 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the productLevelConfig field is set.
   */
  @java.lang.Override
  public boolean hasProductLevelConfig() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   *
   *
   * <pre>
   * Required. The product level configuration.
   * </pre>
   *
   * <code>
   * .google.cloud.retail.v2.ProductLevelConfig product_level_config = 4 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The productLevelConfig.
   */
  @java.lang.Override
  public com.google.cloud.retail.v2.ProductLevelConfig getProductLevelConfig() {
    // Never returns null: falls back to the default instance when unset.
    return productLevelConfig_ == null
        ? com.google.cloud.retail.v2.ProductLevelConfig.getDefaultInstance()
        : productLevelConfig_;
  }

  /**
   *
   *
   * <pre>
   * Required. The product level configuration.
   * </pre>
   *
   * <code>
   * .google.cloud.retail.v2.ProductLevelConfig product_level_config = 4 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.retail.v2.ProductLevelConfigOrBuilder getProductLevelConfigOrBuilder() {
    return productLevelConfig_ == null
        ? com.google.cloud.retail.v2.ProductLevelConfig.getDefaultInstance()
        : productLevelConfig_;
  }
  // Memoized result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    memoizedIsInitialized = 1;
    return true;
  }
  /** Serializes fields in ascending field-number order; defaults are skipped. */
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Proto3 strings are only emitted when non-empty.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, displayName_);
    }
    // Message field: emitted only when its has-bit is set.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(4, getProductLevelConfig());
    }
    getUnknownFields().writeTo(output);
  }
  /** Computes (and memoizes) the serialized size; must mirror {@code writeTo}. */
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(displayName_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, displayName_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(4, getProductLevelConfig());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  /** Field-by-field equality, including unknown fields and message presence. */
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.retail.v2.Catalog)) {
      return super.equals(obj);
    }
    com.google.cloud.retail.v2.Catalog other = (com.google.cloud.retail.v2.Catalog) obj;

    if (!getName().equals(other.getName())) return false;
    if (!getDisplayName().equals(other.getDisplayName())) return false;
    // Presence must match before comparing the message field itself.
    if (hasProductLevelConfig() != other.hasProductLevelConfig()) return false;
    if (hasProductLevelConfig()) {
      if (!getProductLevelConfig().equals(other.getProductLevelConfig())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  /** Memoized hash mixing each set field keyed by its field number. */
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + NAME_FIELD_NUMBER;
    hash = (53 * hash) + getName().hashCode();
    hash = (37 * hash) + DISPLAY_NAME_FIELD_NUMBER;
    hash = (53 * hash) + getDisplayName().hashCode();
    if (hasProductLevelConfig()) {
      hash = (37 * hash) + PRODUCT_LEVEL_CONFIG_FIELD_NUMBER;
      hash = (53 * hash) + getProductLevelConfig().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Static parse entry points; all delegate to PARSER / runtime helpers. ---

  /** Parses a {@code Catalog} from the given buffer. */
  public static com.google.cloud.retail.v2.Catalog parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2.Catalog parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.Catalog parseFrom(com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2.Catalog parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.Catalog parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.retail.v2.Catalog parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.Catalog parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2.Catalog parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  /** Parses one length-delimited {@code Catalog} message from the stream. */
  public static com.google.cloud.retail.v2.Catalog parseDelimitedFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2.Catalog parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.retail.v2.Catalog parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.retail.v2.Catalog parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  /** Returns a new, empty {@code Catalog.Builder}. */
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  /** Returns a builder pre-populated with the fields of {@code prototype}. */
  public static Builder newBuilder(com.google.cloud.retail.v2.Catalog prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Merging the default instance is a no-op, so a fresh Builder suffices there.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* The catalog configuration.
* </pre>
*
* Protobuf type {@code google.cloud.retail.v2.Catalog}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.retail.v2.Catalog)
com.google.cloud.retail.v2.CatalogOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.retail.v2.CatalogProto
          .internal_static_google_cloud_retail_v2_Catalog_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.retail.v2.CatalogProto
          .internal_static_google_cloud_retail_v2_Catalog_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.retail.v2.Catalog.class,
              com.google.cloud.retail.v2.Catalog.Builder.class);
    }

    // Construct using com.google.cloud.retail.v2.Catalog.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    // Eagerly creates nested-field builders when the runtime requests it.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getProductLevelConfigFieldBuilder();
      }
    }
    /** Resets all fields to defaults, clears has-bits, and drops any nested builder. */
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      name_ = "";
      displayName_ = "";
      productLevelConfig_ = null;
      if (productLevelConfigBuilder_ != null) {
        productLevelConfigBuilder_.dispose();
        productLevelConfigBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.retail.v2.CatalogProto
          .internal_static_google_cloud_retail_v2_Catalog_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.retail.v2.Catalog getDefaultInstanceForType() {
      return com.google.cloud.retail.v2.Catalog.getDefaultInstance();
    }
    /** Builds the message, throwing if required invariants are unmet. */
    @java.lang.Override
    public com.google.cloud.retail.v2.Catalog build() {
      com.google.cloud.retail.v2.Catalog result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    /** Builds the message without the initialization check. */
    @java.lang.Override
    public com.google.cloud.retail.v2.Catalog buildPartial() {
      com.google.cloud.retail.v2.Catalog result = new com.google.cloud.retail.v2.Catalog(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields into the new message and translates the builder's
    // has-bit for productLevelConfig (0x4 here) to the message's bit 0x1.
    private void buildPartial0(com.google.cloud.retail.v2.Catalog result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.name_ = name_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.displayName_ = displayName_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.productLevelConfig_ =
            productLevelConfigBuilder_ == null
                ? productLevelConfig_
                : productLevelConfigBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      result.bitField0_ |= to_bitField0_;
    }
    // Reflective field mutators simply defer to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.retail.v2.Catalog) {
        return mergeFrom((com.google.cloud.retail.v2.Catalog) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Proto3 merge: only non-default fields of `other` overwrite this builder.
    public Builder mergeFrom(com.google.cloud.retail.v2.Catalog other) {
      if (other == com.google.cloud.retail.v2.Catalog.getDefaultInstance()) return this;
      if (!other.getName().isEmpty()) {
        name_ = other.name_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (!other.getDisplayName().isEmpty()) {
        displayName_ = other.displayName_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      if (other.hasProductLevelConfig()) {
        mergeProductLevelConfig(other.getProductLevelConfig());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format parse loop: dispatches on each field tag, preserving
    // unrecognized fields via parseUnknownField.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                name_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                displayName_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 34:
              {
                input.readMessage(
                    getProductLevelConfigFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000004;
                break;
              } // case 34
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Set-bit flags: 0x1 = name, 0x2 = displayName, 0x4 = productLevelConfig.
    private int bitField0_;

    // Holds either a String or a ByteString; each accessor caches back its
    // preferred representation (standard generated lazy-conversion pattern).
    private java.lang.Object name_ = "";
    /**
     *
     *
     * <pre>
     * Required. Immutable. The fully qualified resource name of the catalog.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
     * </code>
     *
     * @return The name.
     */
    public java.lang.String getName() {
      java.lang.Object ref = name_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        name_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Immutable. The fully qualified resource name of the catalog.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
     * </code>
     *
     * @return The bytes for name.
     */
    public com.google.protobuf.ByteString getNameBytes() {
      java.lang.Object ref = name_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        name_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * Required. Immutable. The fully qualified resource name of the catalog.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
     * </code>
     *
     * @param value The name to set.
     * @return This builder for chaining.
     */
    public Builder setName(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Immutable. The fully qualified resource name of the catalog.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearName() {
      name_ = getDefaultInstance().getName();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * Required. Immutable. The fully qualified resource name of the catalog.
     * </pre>
     *
     * <code>
     * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
     * </code>
     *
     * @param value The bytes for name to set.
     * @return This builder for chaining.
     */
    public Builder setNameBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      // Enforces the proto3 requirement that string fields carry valid UTF-8.
      checkByteStringIsUtf8(value);
      name_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.lang.Object displayName_ = "";
/**
*
*
* <pre>
* Required. Immutable. The catalog display name.
*
* This field must be a UTF-8 encoded string with a length limit of 128
* characters. Otherwise, an INVALID_ARGUMENT error is returned.
* </pre>
*
* <code>
* string display_name = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return The displayName.
*/
public java.lang.String getDisplayName() {
java.lang.Object ref = displayName_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
displayName_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. Immutable. The catalog display name.
*
* This field must be a UTF-8 encoded string with a length limit of 128
* characters. Otherwise, an INVALID_ARGUMENT error is returned.
* </pre>
*
* <code>
* string display_name = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return The bytes for displayName.
*/
public com.google.protobuf.ByteString getDisplayNameBytes() {
java.lang.Object ref = displayName_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
displayName_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. Immutable. The catalog display name.
*
* This field must be a UTF-8 encoded string with a length limit of 128
* characters. Otherwise, an INVALID_ARGUMENT error is returned.
* </pre>
*
* <code>
* string display_name = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @param value The displayName to set.
* @return This builder for chaining.
*/
public Builder setDisplayName(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
displayName_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Immutable. The catalog display name.
*
* This field must be a UTF-8 encoded string with a length limit of 128
* characters. Otherwise, an INVALID_ARGUMENT error is returned.
* </pre>
*
* <code>
* string display_name = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @return This builder for chaining.
*/
public Builder clearDisplayName() {
displayName_ = getDefaultInstance().getDisplayName();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Immutable. The catalog display name.
*
* This field must be a UTF-8 encoded string with a length limit of 128
* characters. Otherwise, an INVALID_ARGUMENT error is returned.
* </pre>
*
* <code>
* string display_name = 2 [(.google.api.field_behavior) = REQUIRED, (.google.api.field_behavior) = IMMUTABLE];
* </code>
*
* @param value The bytes for displayName to set.
* @return This builder for chaining.
*/
public Builder setDisplayNameBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
displayName_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.retail.v2.ProductLevelConfig productLevelConfig_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2.ProductLevelConfig,
com.google.cloud.retail.v2.ProductLevelConfig.Builder,
com.google.cloud.retail.v2.ProductLevelConfigOrBuilder>
productLevelConfigBuilder_;
/**
*
*
* <pre>
* Required. The product level configuration.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ProductLevelConfig product_level_config = 4 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the productLevelConfig field is set.
*/
public boolean hasProductLevelConfig() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. The product level configuration.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ProductLevelConfig product_level_config = 4 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The productLevelConfig.
*/
public com.google.cloud.retail.v2.ProductLevelConfig getProductLevelConfig() {
if (productLevelConfigBuilder_ == null) {
return productLevelConfig_ == null
? com.google.cloud.retail.v2.ProductLevelConfig.getDefaultInstance()
: productLevelConfig_;
} else {
return productLevelConfigBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. The product level configuration.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ProductLevelConfig product_level_config = 4 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setProductLevelConfig(com.google.cloud.retail.v2.ProductLevelConfig value) {
if (productLevelConfigBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
productLevelConfig_ = value;
} else {
productLevelConfigBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The product level configuration.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ProductLevelConfig product_level_config = 4 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setProductLevelConfig(
com.google.cloud.retail.v2.ProductLevelConfig.Builder builderForValue) {
if (productLevelConfigBuilder_ == null) {
productLevelConfig_ = builderForValue.build();
} else {
productLevelConfigBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The product level configuration.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ProductLevelConfig product_level_config = 4 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeProductLevelConfig(com.google.cloud.retail.v2.ProductLevelConfig value) {
if (productLevelConfigBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& productLevelConfig_ != null
&& productLevelConfig_
!= com.google.cloud.retail.v2.ProductLevelConfig.getDefaultInstance()) {
getProductLevelConfigBuilder().mergeFrom(value);
} else {
productLevelConfig_ = value;
}
} else {
productLevelConfigBuilder_.mergeFrom(value);
}
if (productLevelConfig_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. The product level configuration.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ProductLevelConfig product_level_config = 4 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearProductLevelConfig() {
bitField0_ = (bitField0_ & ~0x00000004);
productLevelConfig_ = null;
if (productLevelConfigBuilder_ != null) {
productLevelConfigBuilder_.dispose();
productLevelConfigBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The product level configuration.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ProductLevelConfig product_level_config = 4 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.retail.v2.ProductLevelConfig.Builder getProductLevelConfigBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getProductLevelConfigFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. The product level configuration.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ProductLevelConfig product_level_config = 4 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.retail.v2.ProductLevelConfigOrBuilder getProductLevelConfigOrBuilder() {
if (productLevelConfigBuilder_ != null) {
return productLevelConfigBuilder_.getMessageOrBuilder();
} else {
return productLevelConfig_ == null
? com.google.cloud.retail.v2.ProductLevelConfig.getDefaultInstance()
: productLevelConfig_;
}
}
/**
*
*
* <pre>
* Required. The product level configuration.
* </pre>
*
* <code>
* .google.cloud.retail.v2.ProductLevelConfig product_level_config = 4 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2.ProductLevelConfig,
com.google.cloud.retail.v2.ProductLevelConfig.Builder,
com.google.cloud.retail.v2.ProductLevelConfigOrBuilder>
getProductLevelConfigFieldBuilder() {
if (productLevelConfigBuilder_ == null) {
productLevelConfigBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.retail.v2.ProductLevelConfig,
com.google.cloud.retail.v2.ProductLevelConfig.Builder,
com.google.cloud.retail.v2.ProductLevelConfigOrBuilder>(
getProductLevelConfig(), getParentForChildren(), isClean());
productLevelConfig_ = null;
}
return productLevelConfigBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.retail.v2.Catalog)
}
// @@protoc_insertion_point(class_scope:google.cloud.retail.v2.Catalog)
private static final com.google.cloud.retail.v2.Catalog DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.retail.v2.Catalog();
}
public static com.google.cloud.retail.v2.Catalog getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<Catalog> PARSER =
new com.google.protobuf.AbstractParser<Catalog>() {
@java.lang.Override
public Catalog parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<Catalog> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<Catalog> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.retail.v2.Catalog getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
openjdk/jtreg | 36,843 | src/share/classes/com/sun/javatest/regtest/exec/Action.java | /*
* Copyright (c) 1998, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package com.sun.javatest.regtest.exec;
import java.io.BufferedReader;
import java.io.File;
import java.io.FileReader;
import java.io.FileWriter;
import java.io.IOException;
import java.io.PrintWriter;
import java.nio.file.Files;
import java.nio.file.Path;
import java.time.Duration;
import java.time.ZonedDateTime;
import java.time.format.DateTimeFormatter;
import java.util.ArrayList;
import java.util.Collections;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import java.util.jar.Attributes;
import java.util.jar.JarFile;
import java.util.jar.Manifest;
import java.util.regex.Matcher;
import java.util.regex.Pattern;
import com.sun.javatest.Status;
import com.sun.javatest.TestResult;
import com.sun.javatest.regtest.agent.ActionHelper;
import com.sun.javatest.regtest.agent.Flags;
import com.sun.javatest.regtest.agent.SearchPath;
import com.sun.javatest.regtest.config.ExecMode;
import com.sun.javatest.regtest.config.Modules;
import com.sun.javatest.regtest.config.OS;
import com.sun.javatest.regtest.config.ParseException;
import com.sun.javatest.regtest.util.FileUtils;
import com.sun.javatest.regtest.util.StringUtils;
/**
* Action is an abstract base class providing the ability to control the
* behavior of each step in a JDK test description. This class requires that
* all derived classes implement the <em>init</em> method (where arguments are
* processed and other initializations occur) and the <em>run</em> method (where
* the actual work for the action occurs. In addition to these methods, the
* Action abstract class contains a variety of protected methods for parsing and
* logging. All static strings used in Action implementations are also defined
* here.
*/
public abstract class Action extends ActionHelper {
/**
* The null constructor.
*/
public Action() {
} // Action()
/**
* Get the user-visible name of this action.
* @return the user-visible name of this action.
*/
public abstract String getName();
/**
* This method does initial processing of the options and arguments for the
* action. Processing is determined by the requirements of run() which is
* determined by the tag specification.
*
* @param opts The options for the action.
* @param args The arguments for the actions.
* @param reason Indication of why this action was invoked.
* @param script The script.
* @exception ParseException If the options or arguments are not expected
* for the action or are improperly formated.
*/
public void init(Map<String,String> opts, List<String> args, String reason,
RegressionScript script)
throws ParseException {
this.opts = opts;
this.args = args;
this.reason = reason;
this.script = script;
}
/**
* The method that does the work of the action. The necessary work for the
* given action is defined by the tag specification.
*
* @return The result of the action.
* @exception TestRunException If an unexpected error occurs while running
* the test.
*/
public abstract Status run() throws TestRunException;
/**
* Get any source files directly referenced by this action.
* @return the source files used by this action.
**/
public Set<File> getSourceFiles() {
return null;
}
/**
* Get the set of modules directly referenced in this action.
* @return the set of modules used by this action.
*/
public Set<String> getModules() {
return Collections.emptySet();
}
protected Map<String, String> getEnvVars(boolean nativeCode) {
Map<String, String> envVars = script.getEnvVars();
if (nativeCode) {
Path nativeDir = script.getNativeDir();
if (nativeDir != null) {
envVars = new LinkedHashMap<>(envVars);
String libPathName;
OS os = OS.current();
switch (os.family) {
case "aix":
case "os400":
libPathName = "LIBPATH";
break;
case "mac":
libPathName = "DYLD_LIBRARY_PATH";
break;
case "windows":
libPathName = "PATH";
break;
default:
libPathName = "LD_LIBRARY_PATH";
break;
}
String libPath = envVars.get(libPathName);
if (libPath == null) {
envVars.put(libPathName, nativeDir.toString());
} else {
envVars.put(libPathName, libPath + File.pathSeparator + nativeDir);
}
envVars = Collections.unmodifiableMap(envVars);
}
}
return envVars;
}
static synchronized void mkdirs(File dir) {
dir.mkdirs();
}
public File getArgFile() {
Path f = script.absTestWorkFile(getName() + "." + script.getNextSerial() + ".jta");
FileUtils.createDirectories(f.getParent());
return f.toFile();
}
//------------------- parsing -----------------------------------------------
/**
* This method parses the <em>timeout</em> action option used by several
* actions. It verifies that the value of the timeout is a valid number.
*
* @param value The proposed value of the timeout.
* @return An integer representation of the passed value for the
* timeout scaled by the timeout factor.
* @exception ParseException If the string does not have a valid
* interpretation as a number.
*/
protected int parseTimeout(String value) throws ParseException {
if (value == null)
throw new ParseException(PARSE_TIMEOUT_NONE);
try {
return script.getActionTimeout(Integer.parseInt(value));
} catch (NumberFormatException e) {
throw new ParseException(PARSE_TIMEOUT_BAD_INT + value);
}
} // parseTimeout()
/**
* This method parses the <em>fail</em> action option used by several
* actions. It verifies that there is no associated value for the option.
*
* @param value The proposed value of the fail.
* @return True if there is no associated value.
* @exception ParseException If there is an associated value.
*/
protected boolean parseFail(String value) throws ParseException {
if (value != null)
throw new ParseException(PARSE_FAIL_UEXPECT + value);
return true;
} // parseFail()
/**
* This method parses the <em>module</em> action option used by some
* actions.
*
* @param value The proposed value of the module
* @return True if the value is a legal identifier.
* @exception ParseException If there is an associated value.
*/
protected String parseModule(String value) throws ParseException {
if (value == null)
throw new ParseException(PARSE_MODULE_NONE);
if (!isQualifiedName(value))
throw new ParseException(PARSE_MODULE_INVALID + value);
return value;
}
private boolean isQualifiedName(String name) {
boolean beginIdent = true;
for (int i = 0; i < name.length(); i++) {
char ch = name.charAt(i);
if (beginIdent) {
if (!Character.isJavaIdentifierStart(ch)) {
return false;
}
beginIdent = false;
} else {
if (ch == '.') {
beginIdent = true;
} else if (!Character.isJavaIdentifierPart(ch)) {
return false;
}
}
}
return !beginIdent;
}
//--------------------------------------------------------------------------
/**
* Add a grant entry to the policy file so that jtreg and other libraries can read
* JTwork/classes.
* The remaining entries in the policy file should remain the same.
*
* @param fileName The absolute name of the original policy file.
* @return A string indicating the absolute name of the modified policy
* file.
* @throws TestRunException if a problem occurred adding this grant entry.
*/
protected File addGrantEntries(File fileName) throws TestRunException {
return addGrantEntries(fileName, null);
}
/**
* Add a grant entry to the policy file so that jtreg and other libraries can read
* JTwork/classes. An entry is added for the argFile, if one is given.
*
* The remaining entries in the policy file should remain the same.
*
* @param fileName the absolute name of the original policy file
* @param argFile an additional file to be granted permissions
* @return a string indicating the absolute name of the modified policy file
* @throws TestRunException if a problem occurred adding this grant entry.
*/
protected File addGrantEntries(File fileName, File argFile) throws TestRunException {
File newPolicy = script.absTestScratchDir().resolve(fileName.getName() + "_new").toFile();
try {
try (FileWriter fw = new FileWriter(newPolicy)) {
fw.write("// The following grant entries were added by jtreg. Do not edit." + LINESEP);
fw.write("grant {" + LINESEP);
fw.write(" permission java.io.FilePermission \""
+ script.absTestClsTopDir().toString().replace(FILESEP, "{/}")
+ "${/}-\", \"read\";" + LINESEP);
if (argFile != null) {
fw.write(" permission java.io.FilePermission \""
+ argFile.getPath().replace(FILESEP, "{/}")
+ "\", \"read\";" + LINESEP);
}
fw.write("};" + LINESEP);
List<Path> libs = new ArrayList<>();
libs.addAll(script.getJavaTestClassPath().asList());
if (script.isJUnitRequired()) {
libs.addAll(script.getJUnitPath().asList());
}
if (script.isTestNGRequired()) {
libs.addAll(script.getTestNGPath().asList());
}
for (Path lib : libs) {
fw.write("grant codebase \"" + lib.toUri() + "\" {" + LINESEP);
fw.write(" permission java.security.AllPermission;" + LINESEP);
fw.write("};" + LINESEP);
}
fw.write(LINESEP);
fw.write("// original policy file:" + LINESEP);
fw.write("// " + fileName + LINESEP);
try (BufferedReader in = new BufferedReader(new FileReader(fileName))) {
String line;
while ((line = in.readLine()) != null) {
fw.write(line + LINESEP);
}
}
}
} catch (IOException e) {
throw new TestRunException(POLICY_WRITE_PROB + newPolicy);
} catch (SecurityException e) {
throw new TestRunException(POLICY_SM_PROB + newPolicy);
}
return newPolicy;
} // addGrantEntries()
/**
* This method parses the <em>policy</em> action option used by several
* actions. It verifies that the indicated policy file exists in the
* directory containing the defining file of the test.
*
* @param value The proposed filename for the policy file.
* @return a file representing the absolute name of the policy file for
* the test.
* @exception ParseException If the passed filename is null, the empty
* string, or does not exist.
*/
protected File parsePolicy(String value) throws ParseException {
if ((value == null) || value.equals(""))
throw new ParseException(MAIN_NO_POLICY_NAME);
File policyFile = script.absTestSrcDir().resolve(value).toFile();
if (!policyFile.exists())
throw new ParseException(MAIN_CANT_FIND_POLICY + policyFile);
return policyFile;
} // parsePolicy()
/**
* This method parses the <em>secure</em> action option used to provide the
* name of a subclass to be installed as the security manager. No
* verification of the existence of the .class is done.
*
* @param value The proposed class name for the security manager.
* @return A string indicating the absolute name of the security manager
* class.
* @exception ParseException If the passed classname is null, the empty
* string
*/
protected String parseSecure(String value) throws ParseException {
if ((value == null) || value.equals(""))
throw new ParseException(MAIN_NO_SECURE_NAME);
return value;
} // parseSecure()
//----------logging methods-------------------------------------------------
/**
* Set up a recording area for the action. The initial contents of the
* default message area are set and will be of the form:
* <pre>
* command: action [command_args]
* reason: [reason_string]
* </pre>
* @param initConfig whether or not to initialize a configuration section
*/
protected void startAction(boolean initConfig) {
long exclusiveAccessWaitMillis = 0;
// if the RegressionScript isn't meant to only check the test description,
// then before starting the action, we check if the RegressionScript
// requires a exclusiveAccess lock and if it does, we acquire it.
if (supportsExclusiveAccess() && !script.isCheck()) {
exclusiveAccessLock = script.getLockIfRequired();
if (exclusiveAccessLock != null) {
long startNanos = System.nanoTime();
exclusiveAccessLock.lock();
exclusiveAccessWaitMillis = Duration.ofNanos(
System.nanoTime() - startNanos).toMillis();
}
}
ZonedDateTime startedAt = ZonedDateTime.now();
startTime = startedAt.toInstant().toEpochMilli();
String name = getName();
section = script.getTestResult().createSection(name);
PrintWriter pw = section.getMessageWriter();
pw.println(LOG_COMMAND + name + " " + StringUtils.join(args, " "));
pw.println(LOG_REASON + reason);
recorder = new ActionRecorder(this);
if (initConfig) {
configWriter = section.createOutput("configuration");
}
if (exclusiveAccessLock != null) {
// log the time spent (in seconds) waiting for exclusiveAccess
pw.println(LOG_EXCLUSIVE_ACCESS_TIME + ((double) exclusiveAccessWaitMillis / 1000.0));
}
pw.println(LOG_STARTED + DATE_TIME_FORMATTER.format(startedAt));
}
/**
* Set the status for the passed action. After this call, the recording area
* for the action becomes immutable.
*
* @param status The final status of the action.
*/
protected void endAction(Status status) {
try {
ZonedDateTime endedAt = ZonedDateTime.now();
long elapsedTime = endedAt.toInstant().toEpochMilli() - startTime;
PrintWriter pw = section.getMessageWriter();
pw.println(LOG_FINISHED + DATE_TIME_FORMATTER.format(endedAt));
pw.println(LOG_ELAPSED_TIME + ((double) elapsedTime / 1000.0));
recorder.close();
section.setStatus(status);
} finally {
if (exclusiveAccessLock != null) {
exclusiveAccessLock.unlock();
}
}
}
/**
* {@return true if the action can run a {@code RegressionScript}
* that has been configured to run exclusively, false otherwise}
*/
protected boolean supportsExclusiveAccess() {
return false;
}
//----------workarounds-------------------------------------------------------
/**
* This method pushes the full, constructed command for the action to the
* log. The constructed command contains the action and its arguments
* modified to run in another process. The command may also contain
* additional things necessary to run the action according to spec. This
* may include things such as a modified classpath, absolute names of files,
* and environment variables.
*
* Used primarily for debugging purposes.
*
* @param action The name of the action currently being processed.
* @param cmdArgs An array of the command to pass to ProcessCommand.
* @param section The section of the result file for this action.
* @see com.sun.javatest.lib.ProcessCommand#run
*/
protected void showCmd(String action, String[] cmdArgs, TestResult.Section section) {
showCmd(action, List.of(cmdArgs), section);
}
protected void showCmd(String action, List<String> cmdArgs, TestResult.Section section) {
PrintWriter pw = section.getMessageWriter();
pw.println(LOG_JT_COMMAND + action);
for (String s: cmdArgs)
pw.print("'" + s + "' ");
pw.println();
} // showCmd()
// this has largely been superseded by the default show mode code
protected void showMode(String action, ExecMode mode, TestResult.Section section) {
PrintWriter pw = section.getMessageWriter();
pw.println("MODE: " + mode);
}
protected void showMode(ExecMode mode) {
showMode(mode, null);
}
protected void showMode(ExecMode mode, Set<String> reasons) {
PrintWriter pw = section.getMessageWriter();
pw.print("Mode: " + mode.name().toLowerCase());
if (reasons != null && !reasons.isEmpty()) {
pw.print(" ");
pw.print(reasons);
}
pw.println();
}
/**
* Given a string, change "\\" into "\\\\" for windows platforms. This method
* must be called exactly once before the string is used to start a new
* process.
*
* @param s The string to translate.
* @return For Windows systems, a modified string. For all other
* systems including i386 (win32 sparc and Linux), the same
* string.
*/
String[] quoteBackslash(String[] s) {
String bs = "\\";
String[] retVal = new String[s.length];
if (System.getProperty("file.separator").equals(bs)) {
for (int i = 0; i < s.length; i++) {
String victim = s[i];
StringBuilder sb = new StringBuilder();
for (int j = 0; j < victim.length(); j++) {
String c = String.valueOf(victim.charAt(j));
sb.append(c);
if (c.equals(bs))
sb.append(c);
}
retVal[i] = sb.toString();
}
} else
retVal = s;
return retVal;
} // quoteBackslash()
/**
* Single quote the given string. This method should be used if the string
* contains characters which should not be interpreted by the shell.
*
* @param s The string to translate.
* @return The same string, surrounded by "'".
*/
String singleQuoteString(String s) {
StringBuilder b = new StringBuilder();
b.append("'").append(s).append("'");
return(b.toString());
} // singleQuoteString()
//--------------------------------------------------------------------------
protected static <T> List<T> join(List<T> l1, List<T> l2) {
List<T> result = new ArrayList<>();
result.addAll(l1);
result.addAll(l2);
return result;
}
//--------------------------------------------------------------------------
Set<String> getModules(SearchPath pp) {
if (pp == null)
return Collections.emptySet();
Set<String> results = new LinkedHashSet<>();
for (Path element : pp.asList()) {
if (Files.isRegularFile(element)) {
getModule(element, results);
} else if (Files.isDirectory(element)) {
for (Path file : FileUtils.listFiles(element)) {
getModule(file, results);
}
}
}
return results;
}
private void getModule(Path file, Set<String> results) {
if (isModule(file)) {
results.add(file.getFileName().toString());
} else if (file.getFileName().toString().endsWith(".jar")) {
results.add(getAutomaticModuleName(file));
}
}
private boolean isModule(Path f) {
if (Files.isDirectory(f)) {
if (script.systemModules.contains(f.getFileName().toString())) {
return true;
}
if (Files.exists(f.resolve("module-info.class")))
return true;
if (Files.exists(f.resolve("module-info.java")))
return true;
}
return false;
}
private static final Map<Path, String> automaticNames = new ConcurrentHashMap<>();
// see java.lang.module.ModulePath.deriveModuleDescriptor
// See ModuleFinder.of for info on determining automatic module names
// https://docs.oracle.com/en/java/javase/11/docs/api/java.base/java/lang/module/ModuleFinder.html#of(java.nio.file.Path...)
private String getAutomaticModuleName(Path f) {
// Step 0: see if already cached
String cached = automaticNames.get(f);
if (cached != null) {
return cached;
}
// Step 1: check for Automatic-Module-Name in thge main jar file manifest
try (JarFile jar = new JarFile(f.toFile())) {
Manifest mf = jar.getManifest();
Attributes attrs = mf.getMainAttributes();
String amn = attrs.getValue("Automatic-Module-Name");
if (amn != null) {
automaticNames.put(f, amn);
return amn;
}
} catch (IOException e) {
script.getMessageWriter().println("Problem reading jar manifest to get Automatic-Module-Name: " + f + " " + e);
}
// Step 2: infer the name from the jar file name
String fn = f.getFileName().toString();
// drop .jar
String mn = fn.substring(0, fn.length()-4);
String vs = null;
// find first occurrence of -${NUMBER}. or -${NUMBER}$
Matcher matcher = Pattern.compile("-(\\d+(\\.|$))").matcher(mn);
if (matcher.find()) {
int start = matcher.start();
// drop tail (ignore version info)
mn = mn.substring(0, start);
}
// finally clean up the module name
mn = mn.replaceAll("[^A-Za-z0-9]", ".") // replace non-alphanumeric
.replaceAll("(\\.)(\\1)+", ".") // collapse repeating dots
.replaceAll("^\\.", "") // drop leading dots
.replaceAll("\\.$", ""); // drop trailing dots
return mn;
}
//----------module exports----------------------------------------------------
protected List<String> getExtraModuleConfigOptions(Modules.Phase phase) {
if (!script.getTestJDK().hasModules())
return Collections.emptyList();
Modules modules = script.getModules();
boolean needAddExports = false;
Set<String> addModules = null;
for (Modules.Entry e: modules) {
String m = e.moduleName;
if (e.needAddExports()) {
needAddExports = true;
}
if (addModules == null) {
addModules = new LinkedHashSet<>();
}
addModules.add(m);
}
if (!needAddExports && addModules == null) {
return Collections.emptyList();
}
List<String> list = new ArrayList<>();
if (addModules != null) {
list.add("--add-modules");
list.add(StringUtils.join(addModules, ","));
}
for (Modules.Entry e: modules) {
if (e.packageName != null) {
if (e.addExports) {
list.add("--add-exports");
list.add(e.moduleName + "/" + e.packageName + "=ALL-UNNAMED");
}
if (e.addOpens && (phase == Modules.Phase.DYNAMIC)) {
list.add("--add-opens");
list.add(e.moduleName + "/" + e.packageName + "=ALL-UNNAMED");
}
}
}
PrintWriter pw = section.getMessageWriter();
pw.println("Additional options from @modules: " + StringUtils.join(list, " "));
return list;
}
protected boolean includesOption(String option, String arg, List<String> options) {
boolean seenOption = false;
for (String opt: options) {
if (opt.equals(option + "=" + arg)) {
return true;
} else if (opt.equals(option)) {
seenOption = true;
} else if (seenOption && opt.equals(arg)) {
return true;
} else {
seenOption = false;
}
}
return false;
}
    //----------misc statics----------------------------------------------------

    // Platform-specific separators, cached once.
    protected static final String FILESEP = System.getProperty("file.separator");
    protected static final String LINESEP = System.getProperty("line.separator");

    // This is a hack to deal with the fact that the implementation of
    // Runtime.exec() for Windows stringifies the arguments.
    protected static final String EXECQUOTE = (System.getProperty("os.name").startsWith("Windows") ? "\"" : "");

    // Machine-readable reason codes recorded in test results.
    public static final String
        REASON_ASSUMED_ACTION = "ASSUMED_ACTION",
        REASON_USER_SPECIFIED = "USER_SPECIFIED",
        REASON_ASSUMED_BUILD  = "ASSUMED_BUILD",
        REASON_FILE_TOO_OLD   = "FILE_OUT_OF_DATE";

    // Human-readable counterparts of the reason codes above.
    protected static final String
        SREASON_ASSUMED_ACTION= "Assumed action based on file name: run ",
        SREASON_USER_SPECIFIED= "User specified action: run ",
        SREASON_ASSUMED_BUILD = "Named class compiled on demand",
        SREASON_FILE_TOO_OLD  = ".class file out of date or does not exist";

    // These are all of the error messages used in all actions.
    protected static final String
        PARSE_TIMEOUT_NONE    = "No timeout value",
        PARSE_TIMEOUT_BAD_INT = "Bad integer specification: ",
        PARSE_FAIL_UEXPECT    = "Unexpected value for `fail': ",
        PARSE_MODULE_NONE     = "No module name",
        PARSE_MODULE_INVALID  = "Invalid module name",

        // policy and security manager
        PARSE_BAD_OPT_JDK     = "Option not allowed using provided test JDK: ",
        PARSE_NO_POLICY_NAME  = "No policy file name",
        PARSE_CANT_FIND_POLICY= "Can't find policy file: ",
        PARSE_NO_SECURE_NAME  = "No security manager file name",
        PARSE_POLICY_OTHERVM  = "`/policy' and `/java.security.policy` require use of `/othervm'",
        PARSE_SECURE_OTHERVM  = "`/secure' requires use of `/othervm'",
        PARSE_TIMEOUT_MANUAL  = "`/manual' disables use of `/timeout'",

        POLICY_WRITE_PROB     = "Problems writing new policy file: ",
        POLICY_SM_PROB        = "Unable to create new policy file: ",

        LOG_COMMAND           = "command: ",
        LOG_RESULT            = " result: ",
        LOG_JT_COMMAND        = "JavaTest command: ",
        LOG_REASON            = "reason: ",
        LOG_ELAPSED_TIME      = "elapsed time (seconds): ",
        LOG_EXCLUSIVE_ACCESS_TIME = "exclusiveAccess wait time (seconds): ",
        LOG_STARTED           = "started: ",
        LOG_FINISHED          = "finished: ",
        //LOG_JDK               = "JDK under test: ",

        // COMMON
        // used in: shell, main, applet
        EXEC_FAIL             = "Execution failed",
        EXEC_FAIL_EXPECT      = "Execution failed as expected",
        EXEC_PASS_UNEXPECT    = "Execution passed unexpectedly",
        CHECK_PASS            = "Test description appears acceptable",

        // used in: compile, main
        AGENTVM_CANT_GET_VM   = "Cannot get VM for test",
        AGENTVM_IO_EXCEPTION  = "Agent communication error: %s; check console log for any additional details",
        AGENTVM_EXCEPTION     = "Agent error: %s; check console log for any additional details",
        CANT_FIND_SRC         = "Can't find source file: ",

        // applet
        APPLET_ONE_ARG_REQ    = "`applet' requires exactly one file argument",
        APPLET_BAD_VAL_MANUAL = "Bad value for `manual' option: ",
        APPLET_BAD_OPT        = "Bad option for applet: ",
        APPLET_CANT_FIND_HTML = "Can't find HTML file: ",
        APPLET_HTML_READ_PROB = "Problem reading HTML file: ",
        APPLET_MISS_ENDBODY   = "No </body> tag in ",
        APPLET_MISS_APPLET    = "No <applet> tag in ",
        APPLET_MISS_ENDAPPLET = "No </applet> tag in ",
        APPLET_MISS_REQ_ATTRIB= " missing required attribute ",
        APPLET_ARCHIVE_USUPP  = "`archive' not supported in file: ",
        APPLET_MISS_REQ_PARAM = "Missing required name or value for param in <param> tag",
        APPLET_CANT_WRITE_ARGS= "Can't write `applet' argument file",
        APPLET_SECMGR_FILEOPS = "Unable to create applet argument file",
        APPLET_USER_EVAL      = ", user evaluated",
        APPLET_MANUAL_TEST    = "Manual test",

        // build
        BUILD_UNEXPECT_OPT    = "Unexpected options for `build'",
        BUILD_NO_CLASSNAME    = "No classname(s) provided for `build'",
        BUILD_BAD_CLASSNAME   = "Bad classname provided for `build': ",
        BUILD_NO_COMP_NEED    = "No need to compile: ",
        BUILD_UP_TO_DATE      = "All files up to date",
        BUILD_SUCC            = "Build successful",
        BUILD_LIB_LIST        = " in directory-list: ",
        BUILD_FUTURE_SOURCE   = "WARNING: file %s has a modification time in the future: %s",
        BUILD_FUTURE_SOURCE_2 = "Unexpected results may occur",

        // clean
        CLEAN_SUCC            = "Clean successful",
        CLEAN_UNEXPECT_OPT    = "Unexpected option(s) for `clean'",
        CLEAN_NO_CLASSNAME    = "No classname(s) provided for `clean'",
        CLEAN_BAD_CLASSNAME   = "Bad classname provided for `clean': ",
        CLEAN_RM_FAILED       = "`clean' unable to delete file: ",
        CLEAN_SECMGR_PROB     = "Problem deleting directory contents: ",

        // compile
        COMPILE_NO_CLASSNAME  = "No classname provided for `compile'",
        COMPILE_NO_DOT_JAVA   = "No classname ending with `.java' found",
        COMPILE_BAD_OPT       = "Bad option for compile: ",
        COMPILE_OPT_DISALLOW  = "Compile option not allowed: ",
        COMPILE_NO_REF_NAME   = "No reference file name",
        COMPILE_CANT_FIND_REF = "Can't find reference file: ",
        COMPILE_CANT_READ_REF = "Can't read reference file: ",
        COMPILE_GOLD_FAIL     = "Output does not match reference file: ",
        COMPILE_GOLD_LINE     = ", line ",
        COMPILE_GOLD_READ_PROB= "Problem reading reference file: ",
        COMPILE_MODULES_UEXPECT = "Unexpected value for `modules': ",
        COMPILE_CANT_CREATE_ARG_FILE = "Can't create `compile' argument file",
        COMPILE_CANT_WRITE_ARGS = "Can't write `compile' argument file",
        COMPILE_SECMGR_FILEOPS = "Unable to create `compile' argument file",
        COMPILE_PASS_UNEXPECT = "Compilation passed unexpectedly",
        COMPILE_PASS          = "Compilation successful",
        COMPILE_FAIL_EXPECT   = "Compilation failed as expected",
        COMPILE_FAIL          = "Compilation failed",
        COMPILE_CANT_RESET_SECMGR= "Cannot reset security manager",
        COMPILE_CANT_RESET_PROPS = "Cannot reset system properties",

        // ignore
        IGNORE_UNEXPECT_OPTS  = "Unexpected option(s) for `ignore'",
        IGNORE_TEST_IGNORED   = "Test ignored",
        IGNORE_TEST_IGNORED_C = "Test ignored: ",
        IGNORE_TEST_SUPPRESSED   = "@ignore suppressed by command line option",
        IGNORE_TEST_SUPPRESSED_C = "@ignore suppressed by command line option: ",

        // junit
        JUNIT_NO_CLASSNAME    = "No class provided for `junit'",
        JUNIT_BAD_MAIN_ARG    = "Bad argument provided for class in `junit'",

        // driver
        DRIVER_NO_CLASSNAME   = "No class provided for `driver'",
        DRIVER_UNEXPECT_VMOPT = "VM options not allowed",
        DRIVER_BAD_OPT        = "Bad option for driver: ",

        // main
        MAIN_NO_CLASSNAME     = "No class provided for `main'",
        MAIN_MANUAL_NO_VAL    = "Arguments to `manual' option not supported: ",
        MAIN_BAD_OPT          = "Bad option for main: ",
        MAIN_CANT_FIND_SECURE = "Can't find security manager file name: ",
        MAIN_BAD_OPT_JDK      = "Option not allowed using provided test JDK: ",
        MAIN_NO_POLICY_NAME   = "No policy file name",
        MAIN_CANT_FIND_POLICY = "Can't find policy file: ",
        MAIN_POLICY_OTHERVM   = "`/policy' requires use of `/othervm'",
        MAIN_NO_SECURE_NAME   = "No security manager file name",
        MAIN_SECURE_OTHERVM   = "`/secure' requires use of `/othervm'",
        MAIN_UNEXPECT_VMOPT   = ": vm option(s) found, need to specify /othervm",
        MAIN_POLICY_WRITE_PROB= "Problems writing new policy file: ",
        MAIN_POLICY_SM_PROB   = "Unable to create new policy file: ",
        MAIN_CANT_RESET_SECMGR= "Cannot reset security manager",
        MAIN_CANT_RESET_PROPS = "Cannot reset system properties",
        MAIN_NO_NATIVES       = "Use -nativepath to specify the location of native code",

        // runOtherJVM
        MAIN_CANT_WRITE_ARGS  = "Can't write `main' argument file",
        MAIN_SECMGR_FILEOPS   = "Unable to create `main' argument file",

        // shell
        SHELL_NO_SCRIPT_NAME  = "No script name provided for `shell'",
        SHELL_MANUAL_NO_VAL   = "Arguments to `manual' option not supported: ",
        SHELL_BAD_OPTION      = "Bad option for shell: ";

    //----------member variables------------------------------------------------

    // Parsed action options/arguments and bookkeeping; initialized once in
    // init(), hence the commented-out final markers.
    protected /*final*/ Map<String,String> opts;
    protected /*final*/ List<String> args;
    protected /*final*/ String reason;
    protected /*final*/ RegressionScript script;

    protected /*final*/ TestResult.Section section;
    protected /*final*/ ActionRecorder recorder;
    protected /*final*/ PrintWriter configWriter;

    // Time at which the action started running, for elapsed-time reporting.
    private long startTime;

    // used when the action's RegressionScript is configured to
    // run in exclusiveAccess.dir
    private Lock exclusiveAccessLock;

    // Debug flags controlling extra console output.
    protected static final boolean showCmd = Flags.get("showCmd");
    protected static final boolean showMode = Flags.get("showMode");
    protected static final boolean showJDK = Flags.get("showJDK");

    // used for logging start/end date time in the report, example "Fri Aug 22 11:12:22.256 UTC 2025"
    private static final DateTimeFormatter DATE_TIME_FORMATTER =
            DateTimeFormatter.ofPattern("EEE MMM dd HH:mm:ss.SSS zzz yyyy");
}
|
openjdk/skara | 36,646 | forge/src/main/java/org/openjdk/skara/forge/gitlab/GitLabMergeRequest.java | /*
* Copyright (c) 2018, 2025, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package org.openjdk.skara.forge.gitlab;
import org.openjdk.skara.forge.*;
import org.openjdk.skara.host.HostUser;
import org.openjdk.skara.issuetracker.*;
import org.openjdk.skara.json.*;
import org.openjdk.skara.network.*;
import org.openjdk.skara.vcs.*;
import java.net.URI;
import java.nio.charset.StandardCharsets;
import java.time.ZonedDateTime;
import java.util.*;
import java.util.logging.Logger;
import java.util.regex.Pattern;
import java.util.stream.*;
public class GitLabMergeRequest implements PullRequest {
private final JSONValue json;
private final RestRequest request;
private final Logger log = Logger.getLogger("org.openjdk.skara.host");;
private final GitLabRepository repository;
private final GitLabHost host;
// Only cache the label names as those are most commonly used and converting to
// Label objects is expensive. This list is always sorted.
private List<String> labels;
// Lazy cache for comparisonSnapshot
private Object comparisonSnapshot;
private static final int GITLAB_MR_COMMENT_BODY_MAX_SIZE = 64_000;
private static final String DRAFT_PREFIX = "Draft:";
    /**
     * Wraps an existing merge request. The given RestRequest is narrowed to
     * this merge request's endpooints and the label-name cache is primed from
     * the JSON payload already at hand.
     */
    GitLabMergeRequest(GitLabRepository repository, GitLabHost host, JSONValue jsonValue, RestRequest request) {
        this.repository = repository;
        this.host = host;
        this.json = jsonValue;
        // All subsequent calls are relative to this particular merge request
        this.request = request.restrict("merge_requests/" + json.get("iid").toString() + "/");

        labels = json.get("labels").stream()
                .map(JSONValue::asString)
                .sorted()
                .collect(Collectors.toList());
    }
    /** Returns the repository this merge request belongs to. */
    @Override
    public HostedRepository repository() {
        return repository;
    }

    /** Merge requests are not backed by an issue project; always returns null. */
    @Override
    public IssueProject project() {
        return null;
    }

    /** Returns the project-local merge request id ("iid" in GitLab terms). */
    @Override
    public String id() {
        return json.get("iid").toString();
    }

    /** Returns the user who created the merge request. */
    @Override
    public HostUser author() {
        return host.parseAuthorField(json);
    }
    /**
     * Returns the reviews made on this merge request, reconstructed from the
     * system notes GitLab adds for approve/unapprove actions. Each review is
     * attributed to the most recent head commit that existed when the review
     * note was created, and review target refs are adjusted for any target
     * branch changes.
     */
    @Override
    public List<Review> reviews() {
        // Pairs a merge request version's head commit with its creation time
        class CommitDate {
            private Hash hash;
            private ZonedDateTime date;
        }

        // Every push creates a new MR "version" with its own head commit
        var commits = request.get("versions").execute().stream()
                .map(JSONValue::asObject)
                .map(obj -> {
                    var ret = new CommitDate();
                    ret.hash = new Hash(obj.get("head_commit_sha").asString());
                    ret.date = ZonedDateTime.parse(obj.get("created_at").asString());
                    return ret;
                })
                .collect(Collectors.toCollection(ArrayList::new));

        // Commits are returned in reverse chronological order. We want them
        // primarily in chronological order based on the "created_at" date
        // and secondary in the reverse order they originally came in. We can
        // trust that List::sort is stable.
        Collections.reverse(commits);
        commits.sort(Comparator.comparing(cd -> cd.date));

        // It's possible to create a merge request without any commits
        if (commits.size() == 0) {
            return List.of();
        }

        var currentTargetRef = targetRef();
        var notes = request.get("notes").execute();
        var reviews = notes.stream()
                .map(JSONValue::asObject)
                .filter(obj -> obj.get("system").asBoolean())
                // This matches both approved and unapproved notes
                .filter(obj -> obj.get("body").asString().contains("approved this merge request"))
                .map(obj -> {
                    var reviewerObj = obj.get("author").asObject();
                    var reviewer = HostUser.create(reviewerObj.get("id").asInt(),
                                                   reviewerObj.get("username").asString(),
                                                   reviewerObj.get("name").asString());
                    var verdict = obj.get("body").asString().contains("unapproved") ? Review.Verdict.NONE : Review.Verdict.APPROVED;
                    var createdAt = ZonedDateTime.parse(obj.get("created_at").asString());

                    // Find the latest commit that isn't created after our review
                    Hash hash = null;
                    for (var cd : commits) {
                        if (createdAt.isAfter(cd.date)) {
                            hash = cd.hash;
                        }
                    }
                    var id = obj.get("id").toString();
                    return new Review(createdAt, reviewer, verdict, hash, id, "", currentTargetRef);
                }).toList();

        var targetRefChanges = targetRefChanges(notes);
        return PullRequest.calculateReviewTargetRefs(reviews, targetRefChanges);
    }
    // Matches GitLab's system note left when the target branch is changed.
    private static final Pattern REF_CHANGES_PATTERN = Pattern.compile("changed target branch from `(.*)` to `(.*)`");

    /**
     * Extracts all target branch changes from the given list of notes, in the
     * order they appear.
     */
    private List<ReferenceChange> targetRefChanges(JSONValue notes) {
        return notes.stream()
                .map(JSONValue::asObject)
                .filter(obj -> obj.get("system").asBoolean())
                .map(obj -> {
                    var matcher = REF_CHANGES_PATTERN.matcher(obj.get("body").asString());
                    if (matcher.matches()) {
                        return new ReferenceChange(matcher.group(1), matcher.group(2), ZonedDateTime.parse(obj.get("created_at").asString()));
                    } else {
                        return null;
                    }
                })
                .filter(Objects::nonNull)
                .toList();
    }

    /** Returns all target branch changes made to this merge request. */
    @Override
    public List<ReferenceChange> targetRefChanges() {
        return targetRefChanges(request.get("notes").execute());
    }
@Override
public void addReview(Review.Verdict verdict, String body) {
// Remove any previous awards
var awards = request.get("award_emoji").execute().stream()
.map(JSONValue::asObject)
.filter(obj -> obj.get("name").asString().equals("thumbsup") ||
obj.get("name").asString().equals("thumbsdown") ||
obj.get("name").asString().equals("question"))
.filter(obj -> obj.get("user").get("username").asString().equals(repository.forge().currentUser().username()))
.map(obj -> obj.get("id").toString())
.collect(Collectors.toList());
for (var award : awards) {
request.delete("award_emoji/" + award).execute();
}
String award;
switch (verdict) {
case APPROVED:
award = "thumbsup";
break;
case DISAPPROVED:
award = "thumbsdown";
break;
default:
award = "question";
break;
}
request.post("award_emoji")
.body("name", award)
.execute();
}
    /** Updating an existing review is not supported for GitLab. */
    @Override
    public void updateReview(String id, String body) {
        throw new RuntimeException("not implemented yet");
    }
    /**
     * Converts a GitLab discussion note into a ReviewComment, resolving which
     * file, line and commit the comment is attached to.
     *
     * @param discussionId the id of the discussion thread the note belongs to
     * @param parent the comment this note replies to, or null for thread roots
     * @param note the raw note JSON
     */
    private ReviewComment parseReviewComment(String discussionId, ReviewComment parent, JSONObject note) {
        int line;
        String path;
        Hash hash;
        var position = note.get("position");

        // Is this a line comment?
        // For line comments, this field is always set, either to a value or null, but
        // for file comments there is no new_line field at all.
        if (position.get("new_line") != null) {
            // Is the comment on the old or the new version of the file?
            if (position.get("new_line").isNull()) {
                line = position.get("old_line").asInt();
                path = position.get("old_path").asString();
                hash = new Hash(position.get("start_sha").asString());
            } else {
                line = position.get("new_line").asInt();
                path = position.get("new_path").asString();
                hash = new Hash(position.get("head_sha").asString());
            }
        } else {
            // This comment does not have a line. Gitlab seems to only allow file comments
            // on the new file
            line = 0;
            path = position.get("new_path").asString();
            hash = new Hash(position.get("head_sha").asString());
        }

        var comment = new ReviewComment(parent,
                                        discussionId,
                                        hash,
                                        path,
                                        line,
                                        note.get("id").toString(),
                                        note.get("body").asString(),
                                        HostUser.create(note.get("author").get("id").asInt(),
                                                        note.get("author").get("username").asString(),
                                                        note.get("author").get("name").asString()),
                                        ZonedDateTime.parse(note.get("created_at").asString()),
                                        ZonedDateTime.parse(note.get("updated_at").asString()));
        return comment;
    }
    /**
     * Creates a new review comment (discussion) on the given line of a file.
     *
     * @param base the base commit of the diff range
     * @param hash the head commit the comment applies to
     * @param path the path of the file being commented on
     * @param line the line number in the new version of the file
     * @param body the comment text
     */
    @Override
    public ReviewComment addReviewComment(Hash base, Hash hash, String path, int line, String body) {
        log.fine("Posting a new review comment");
        var query = JSON.object()
                        .put("body", body)
                        .put("position", JSON.object()
                                             .put("base_sha", base.hex())
                                             .put("start_sha", base.hex())
                                             .put("head_sha", hash.hex())
                                             .put("position_type", "text")
                                             .put("new_path", path)
                                             .put("new_line", line));
        var comments = request.post("discussions").body(query).execute();
        // A freshly created discussion should contain exactly the posted note
        if (comments.get("notes").asArray().size() != 1) {
            throw new RuntimeException("Failed to create review comment");
        }
        var parsedComment = parseReviewComment(comments.get("id").asString(), null,
                                               comments.get("notes").asArray().get(0).asObject());
        log.fine("Id of new review comment: " + parsedComment.id());
        return parsedComment;
    }

    /** Posts a reply in the discussion thread of an existing review comment. */
    @Override
    public ReviewComment addReviewCommentReply(ReviewComment parent, String body) {
        var discussionId = parent.threadId();
        var comment = request.post("discussions/" + discussionId + "/notes")
                             .body("body", body)
                             .execute();
        return parseReviewComment(discussionId, parent, comment.asObject());
    }
    /**
     * Parses a discussion thread into a list of review comments, chaining each
     * note to the previous one as its parent. System notes and plain comments
     * (notes without a position) are skipped.
     */
    private List<ReviewComment> parseDiscussion(JSONObject discussion) {
        var ret = new ArrayList<ReviewComment>();
        ReviewComment parent = null;
        for (var note : discussion.get("notes").asArray()) {
            // Ignore system generated comments
            if (note.get("system").asBoolean()) {
                continue;
            }
            // Ignore plain comments
            if (!note.contains("position")) {
                continue;
            }

            var comment = parseReviewComment(discussion.get("id").asString(), parent, note.asObject());
            parent = comment;
            ret.add(comment);
        }

        return ret;
    }

    /** Returns all review comments, i.e. comments attached to a diff position. */
    @Override
    public List<ReviewComment> reviewComments() {
        return request.get("discussions").execute().stream()
                      // individual notes are plain comments, not review threads
                      .filter(entry -> !entry.get("individual_note").asBoolean())
                      .flatMap(entry -> parseDiscussion(entry.asObject()).stream())
                      .collect(Collectors.toList());
    }
    /** Returns the current head commit of the merge request. */
    @Override
    public Hash headHash() {
        return new Hash(json.get("sha").asString());
    }

    /** Returns the ref that can be fetched to obtain the head commit. */
    @Override
    public String fetchRef() {
        return "merge-requests/" + id() + "/head";
    }

    /** Returns the name of the source branch. */
    @Override
    public String sourceRef() {
        return json.get("source_branch").asString();
    }

    /**
     * Returns the repository the source branch lives in, or empty if that
     * project has been deleted or is otherwise inaccessible.
     */
    @Override
    public Optional<HostedRepository> sourceRepository() {
        if (json.get("source_project_id").isNull()) {
            return Optional.empty();
        } else {
            var projectId = json.get("source_project_id").asInt();
            var project = ((GitLabHost) repository.forge()).getProjectInfo(projectId);
            if (project.isEmpty()) {
                return Optional.empty();
            } else {
                return Optional.of(new GitLabRepository((GitLabHost) repository.forge(), project.get()));
            }
        }
    }

    /** Returns the name of the target branch. */
    @Override
    public String targetRef() {
        var targetRef = json.get("target_branch").asString();
        return targetRef;
    }
    /**
     * In GitLab, if the pull request is in draft mode, the title will include the draft prefix
     */
    @Override
    public String title() {
        var title = json.get("title").asString().strip();
        // Strip any leading "draft"/"Draft:" marker, case-insensitively
        String pattern = "(?i)^draft:?\\s*";
        return title.replaceAll(pattern, "").strip();
    }

    /**
     * In GitLab, when the bot attempts to update the pull request title,
     * it should check if the pull request is in draft mode.
     * If it is, the bot should add the draft prefix.
     */
    @Override
    public void setTitle(String title) {
        if (isDraft()) {
            title = DRAFT_PREFIX + " " + title;
        }
        request.put("")
               .body("title", title)
               .execute();
    }

    /**
     * This method sets the title without checking if the pull request is in draft mode.
     * Used to take a merge request out of draft mode (see makeNotDraft).
     */
    private void setTitleWithoutDraftPrefix(String title) {
        request.put("")
               .body("title", title)
               .execute();
    }
    /** Returns the merge request description, never null. */
    @Override
    public String body() {
        var body = json.get("description").asString();
        // GitLab reports a missing description as null
        if (body == null) {
            body = "";
        }
        return body;
    }

    /** Replaces the merge request description. */
    @Override
    public void setBody(String body) {
        request.put("")
               .body("description", body)
               .execute();
    }
    /** Converts a raw GitLab note into a Comment. */
    private Comment parseComment(JSONValue comment) {
        var ret = new Comment(comment.get("id").toString(),
                              comment.get("body").asString(),
                              HostUser.create(comment.get("author").get("id").asInt(),
                                              comment.get("author").get("username").asString(),
                                              comment.get("author").get("name").asString()),
                              ZonedDateTime.parse(comment.get("created_at").asString()),
                              ZonedDateTime.parse(comment.get("updated_at").asString()));
        return ret;
    }

    /** Returns all plain comments on the merge request, oldest first. */
    @Override
    public List<Comment> comments() {
        return request.get("notes").param("sort", "asc").execute().stream()
                      .filter(entry -> !entry.contains("position"))    // Ignore comments with a position - they are review comments
                      .filter(entry -> !entry.get("system").asBoolean()) // Ignore system generated comments
                      .map(this::parseComment)
                      .collect(Collectors.toList());
    }

    /**
     * Posts a new plain comment. The body is truncated if it exceeds GitLab's
     * note size limit.
     */
    @Override
    public Comment addComment(String body) {
        log.fine("Posting a new comment");
        body = limitBodySize(body);
        var comment = request.post("notes")
                             .body("body", body)
                             .execute();
        var parsedComment = parseComment(comment);
        log.fine("Id of new comment: " + parsedComment.id());
        return parsedComment;
    }
    /** Removing comments is not supported for GitLab. */
    @Override
    public void removeComment(Comment comment) {
        throw new RuntimeException("not implemented yet");
    }

    /**
     * Replaces the body of an existing comment. The body is truncated if it
     * exceeds GitLab's note size limit.
     */
    @Override
    public Comment updateComment(String id, String body) {
        log.fine("Updating existing comment " + id);
        body = limitBodySize(body);
        var comment = request.put("notes/" + id)
                             .body("body", body)
                             .execute();
        var parsedComment = parseComment(comment);
        log.fine("Id of updated comment: " + parsedComment.id());
        return parsedComment;
    }

    /** Returns the creation time of the merge request. */
    @Override
    public ZonedDateTime createdAt() {
        return ZonedDateTime.parse(json.get("created_at").asString());
    }

    /** Returns the time of the last update to the merge request. */
    @Override
    public ZonedDateTime updatedAt() {
        return ZonedDateTime.parse(json.get("updated_at").asString());
    }
@Override
public State state() {
if (json.get("state").asString().equals("opened")) {
return State.OPEN;
}
return State.CLOSED;
}
    // Hidden markers embedded in status-check comments so they can be located
    // and parsed again later. The result marker encodes name, status, hash and
    // metadata.
    private final String checkMarker = "<!-- Merge request status check message (%s) -->";
    private final String checkResultMarker = "<!-- Merge request status check result (%s) (%s) (%s) (%s) -->";
    private final String checkResultPattern = "<!-- Merge request status check result \\(([-\\w]+)\\) \\((\\w+)\\) \\(%s\\) \\((\\S+)\\) -->";

    /** Finds the comment carrying the status check with the given name, if any. */
    private Optional<Comment> getStatusCheckComment(String name) {
        var marker = String.format(checkMarker, name);

        return comments().stream()
                         .filter(c -> c.body().contains(marker))
                         .findFirst();
    }
private String encodeMarkdown(String message) {
return message.replaceAll("\n", " \n");
}
    // Matches the rendered body of a status-check comment: a "# title" heading
    // followed by the summary text.
    private final Pattern checkBodyPattern = Pattern.compile("^# ([^\\n\\r]*)\\R(.*)",
                                                             Pattern.DOTALL | Pattern.MULTILINE);

    /**
     * Reconstructs all status checks recorded for the given commit by parsing
     * the hidden result markers embedded in the bot's comments.
     *
     * @param hash the commit the checks apply to
     * @return a map from check name to the reconstructed check
     */
    @Override
    public Map<String, Check> checks(Hash hash) {
        var pattern = Pattern.compile(String.format(checkResultPattern, hash.hex()));
        var matchers = comments().stream()
                                 .collect(Collectors.toMap(comment -> comment,
                        comment -> pattern.matcher(comment.body())));

        return matchers.entrySet().stream()
                .filter(entry -> entry.getValue().find())
                .collect(Collectors.toMap(entry -> entry.getValue().group(1),
                        entry -> {
                            var checkBuilder = CheckBuilder.create(entry.getValue().group(1), hash);
                            // Comment creation/update times double as check times
                            checkBuilder.startedAt(entry.getKey().createdAt());
                            var status = entry.getValue().group(2);
                            var completedAt = entry.getKey().updatedAt();
                            switch (status) {
                                case "RUNNING":
                                    // do nothing
                                    break;
                                case "SUCCESS":
                                    checkBuilder.complete(true, completedAt);
                                    break;
                                case "FAILURE":
                                    checkBuilder.complete(false, completedAt);
                                    break;
                                case "CANCELLED":
                                    checkBuilder.cancel(completedAt);
                                    break;
                                default:
                                    throw new IllegalStateException("Unknown status: " + status);
                            }
                            // Metadata is stored Base64-encoded in the marker
                            if (!entry.getValue().group(3).equals("NONE")) {
                                checkBuilder.metadata(new String(Base64.getDecoder().decode(entry.getValue().group(3)), StandardCharsets.UTF_8));
                            }
                            var checkBodyMatcher = checkBodyPattern.matcher(entry.getKey().body());
                            if (checkBodyMatcher.find()) {
                                // escapeMarkdown adds an additional space before the newline
                                var title = checkBodyMatcher.group(1);
                                var nonEscapedTitle = title.substring(0, title.length() - 2);
                                checkBuilder.title(nonEscapedTitle);
                                checkBuilder.summary(checkBodyMatcher.group(2));
                            }

                            return checkBuilder.build();
                        }));
    }
private String statusFor(Check check) {
switch (check.status()) {
case IN_PROGRESS:
return "RUNNING";
case SUCCESS:
return "SUCCESS";
case FAILURE:
return "FAILURE";
case CANCELLED:
return "CANCELLED";
default:
throw new RuntimeException("Unknown check status");
}
}
private String metadataFor(Check check) {
if (check.metadata().isPresent()) {
return Base64.getEncoder().encodeToString(check.metadata().get().getBytes(StandardCharsets.UTF_8));
}
return "NONE";
}
private String linkToDiff(String path, Hash hash, int line) {
return "[" + path + " line " + line + "](" + URIBuilder.base(repository.url())
.setPath("/" + repository.name()+ "/blob/" + hash.hex() + "/" + path)
.setAuthentication(null)
.build() + "#L" + Integer.toString(line) + ")";
}
    /**
     * Renders the visible Markdown body of a status-check comment: a status
     * headline, the optional title and summary, and one bullet per annotation
     * with a link to the affected line.
     */
    private String bodyFor(Check check) {
        var status = check.status();
        String body;
        switch (status) {
            case IN_PROGRESS:
                body = ":hourglass_flowing_sand: The merge request check **" + check.name() + "** is currently running...";
                break;
            case SUCCESS:
                body = ":tada: The merge request check **" + check.name() + "** completed successfully!";
                break;
            case FAILURE:
                body = ":warning: The merge request check **" + check.name() + "** identified the following issues:";
                break;
            case CANCELLED:
                body = ":x: The merge request check **" + check.name() + "** has been cancelled.";
                break;
            default:
                throw new RuntimeException("Unknown check status");
        }

        if (check.title().isPresent()) {
            body += encodeMarkdown("\n" + "# " + check.title().get());
        }

        if (check.summary().isPresent()) {
            body += encodeMarkdown("\n" + check.summary().get());
        }

        // One bullet per annotation, with a severity label and a deep link
        for (var annotation : check.annotations()) {
            var annotationString = "  - ";
            switch (annotation.level()) {
                case NOTICE:
                    annotationString += "Notice: ";
                    break;
                case WARNING:
                    annotationString += "Warning: ";
                    break;
                case FAILURE:
                    annotationString += "Failure: ";
                    break;
            }
            annotationString += linkToDiff(annotation.path(), check.hash(), annotation.startLine());
            annotationString += "\n    - " + annotation.message().lines().collect(Collectors.joining("\n    - "));

            body += "\n" + annotationString;
        }

        return body;
    }
    /**
     * Writes the given check into its status comment: updates the previous
     * comment when one exists, otherwise posts a new one. The hidden markers
     * are prepended so the check can be parsed back later.
     */
    private void updateCheckComment(Optional<Comment> previous, Check check) {
        var status = statusFor(check);
        var metadata = metadataFor(check);
        var markers = String.format(checkMarker, check.name()) + "\n" +
                      String.format(checkResultMarker,
                                    check.name(),
                                    status,
                                    check.hash(),
                                    metadata);
        var body = bodyFor(check);
        var message = markers + "\n" + body;
        previous.ifPresentOrElse(
                p  -> updateComment(p.id(), message),
                () -> addComment(message));
    }

    /** Publishes a new check as a status comment on the merge request. */
    @Override
    public void createCheck(Check check) {
        log.info("Looking for previous status check comment");

        var previous = getStatusCheckComment(check.name());
        updateCheckComment(previous, check);
    }

    /** Updates an existing check; recreates the comment if it was deleted. */
    @Override
    public void updateCheck(Check check) {
        log.info("Looking for previous status check comment");

        var previous = getStatusCheckComment(check.name())
                .orElseGet(() -> addComment("Progress deleted?"));
        updateCheckComment(Optional.of(previous), check);
    }
    /** Returns the web URL of the merge request's diff view. */
    @Override
    public URI changeUrl() {
        return URIBuilder.base(webUrl()).appendPath("/diffs").build();
    }

    /** Returns the web URL of the diff view relative to the given base commit. */
    @Override
    public URI changeUrl(Hash base) {
        return URIBuilder.base(webUrl()).appendPath("/diffs")
                         .setQuery(Map.of("start_sha", List.of(base.hex())))
                         .build();
    }

    /** Returns the web URL of a specific comment. */
    @Override
    public URI commentUrl(Comment comment) {
        return URIBuilder.base(webUrl()).appendPath("#note_" + comment.id()).build();
    }

    /** Returns the web URL of a specific review comment. */
    @Override
    public URI reviewCommentUrl(ReviewComment reviewComment) {
        return URIBuilder.base(webUrl()).appendPath("#note_" + reviewComment.id()).build();
    }

    /** Returns the web URL of a specific review. */
    @Override
    public URI reviewUrl(Review review) {
        return URIBuilder.base(webUrl()).appendPath("#note_" + review.id()).build();
    }

    /** Returns whether the merge request is currently marked as a draft. */
    @Override
    public boolean isDraft() {
        return json.get("draft").asBoolean();
    }
@Override
public void setState(State state) {
request.put("")
.body("state_event", state != State.OPEN ? "close" : "reopen")
.execute();
}
    // Cache of the repository's labels, keyed by label name; populated lazily.
    private Map<String, Label> labelNameToLabel;

    /**
     * Lookup a label from the repository labels. Initialize and refresh a cache
     * of the repository labels lazily.
     */
    private Label labelNameToLabel(String labelName) {
        // Refresh the cache on a miss so newly created labels are picked up
        if (labelNameToLabel == null || !labelNameToLabel.containsKey(labelName)) {
            labelNameToLabel = repository.labels()
                    .stream()
                    .collect(Collectors.toMap(Label::name, l -> l));
        }
        return labelNameToLabel.get(labelName);
    }
    /** Adds a label to the merge request and invalidates the name cache. */
    @Override
    public void addLabel(String label) {
        labels = null;
        request.put("")
               .body("add_labels", label)
               .execute();
    }

    /** Removes a label from the merge request and invalidates the name cache. */
    @Override
    public void removeLabel(String label) {
        labels = null;
        request.put("")
               .body("remove_labels", label)
               .execute();
    }

    /** Replaces all labels on the merge request and updates the name cache. */
    @Override
    public void setLabels(List<String> labels) {
        request.put("")
               .body("labels", String.join(",", labels))
               .execute();
        this.labels = labels.stream().sorted().toList();
    }

    /** Returns the Label objects for the current label names. */
    @Override
    public List<Label> labels() {
        return labelNames().stream()
                .map(this::labelNameToLabel)
                // Avoid throwing NPE for unknown labels
                .filter(Objects::nonNull)
                .toList();
    }

    /** Returns the sorted label names, fetching them on a cache miss. */
    @Override
    public List<String> labelNames() {
        if (labels == null) {
            labels = request.get("").execute().get("labels").stream()
                            .map(JSONValue::asString)
                            .sorted()
                            .collect(Collectors.toList());
        }
        return labels;
    }
    /** Returns the web URL of the merge request. */
    @Override
    public URI webUrl() {
        return URIBuilder.base(repository.webUrl())
                         .setPath("/" + repository.name() + "/merge_requests/" + id())
                         .build();
    }

    /** Short human-readable description used in logs. */
    @Override
    public String toString() {
        return "GitLabMergeRequest #" + id() + " by " + author();
    }
    /**
     * Returns the assignee of the merge request as a singleton list, or an
     * empty list when unassigned.
     */
    @Override
    public List<HostUser> assignees() {
        var assignee = json.get("assignee").asObject();
        if (assignee != null) {
            // NOTE(review): unchecked Optional.get - throws if the forge cannot
            // resolve the username; presumably the assignee always exists here
            var user = repository.forge().user(assignee.get("username").asString());
            return List.of(user.get());
        }
        return Collections.emptyList();
    }

    /**
     * Sets the assignee of the merge request. GitLab only supports a single
     * assignee through this API, so any additional users are pinged with a
     * comment instead.
     */
    @Override
    public void setAssignees(List<HostUser> assignees) {
        // An id of 0 clears the assignee
        var id = assignees.size() == 0 ? 0 : Integer.valueOf(assignees.get(0).id());
        var param = JSON.object().put("assignee_id", id);
        request.put().body(param).execute();
        if (assignees.size() > 1) {
            var rest = assignees.subList(1, assignees.size());
            var usernames = rest.stream()
                                .map(HostUser::username)
                                .map(username -> "@" + username)
                                .collect(Collectors.joining(" "));
            var comment = usernames + " can you have a look at this merge request?";
            addComment(comment);
        }
    }
    /** Takes the merge request out of draft mode by rewriting the title. */
    @Override
    public void makeNotDraft() {
        if (isDraft()) {
            // title() strips the draft prefix, so writing it back un-drafts
            setTitleWithoutDraftPrefix(title());
        }
    }

    /**
     * Returns the time the merge request was last marked as a draft, derived
     * from GitLab's system notes. Falls back to the creation time when the MR
     * is a draft but no such note exists (it was created as a draft).
     */
    @Override
    public Optional<ZonedDateTime> lastMarkedAsDraftTime() {
        var draftMessage = "marked this merge request as **draft**";
        var notes = request.get("notes").execute();
        var lastMarkedAsDraftTime = notes.stream()
                .map(JSONValue::asObject)
                .filter(obj -> obj.get("system").asBoolean())
                .filter(obj -> draftMessage.equals(obj.get("body").asString()))
                .map(obj -> ZonedDateTime.parse(obj.get("created_at").asString()))
                .max(ZonedDateTime::compareTo);
        if (lastMarkedAsDraftTime.isEmpty() && isDraft()) {
            return Optional.of(createdAt());
        }
        return lastMarkedAsDraftTime;
    }

    /** Returns the URL of the raw diff of the merge request. */
    @Override
    public URI diffUrl() {
        return URI.create(webUrl() + ".diff");
    }

    /**
     * Returns the time the given label was added to the merge request.
     * NOTE(review): uses findFirst, so this is the time of the FIRST add event
     * even if the label was later removed and re-added - confirm intended.
     */
    @Override
    public Optional<ZonedDateTime> labelAddedAt(String label) {
        return request.get("resource_label_events")
                      .execute()
                      .stream()
                      .map(JSONValue::asObject)
                      .filter(obj -> obj.contains("action"))
                      .filter(obj -> obj.get("action").asString().equals("add"))
                      .filter(obj -> obj.get("label").get("name").asString().equals(label))
                      .map(o -> ZonedDateTime.parse(o.get("created_at").asString()))
                      .findFirst();
    }
@Override
public void setTargetRef(String targetRef) {
request.put("")
.body("target_branch", targetRef)
.execute();
}
@Override
public URI headUrl() {
return URI.create(webUrl() + "/diffs?commit_id=" + headHash().hex());
}
@Override
public Diff diff() {
var changes = request.get("changes").param("access_raw_diffs", "true").execute();
boolean complete;
if (changes.get("overflow").asBoolean()) {
complete = false;
} else {
complete = !changes.get("changes_count").asString().contains("+");
}
var targetHash = repository.branchHash(targetRef()).orElseThrow();
return repository.toDiff(targetHash, headHash(), changes.get("changes"), complete);
}
@Override
public Optional<HostUser> closedBy() {
if (!isClosed()) {
return Optional.empty();
}
JSONValue closedBy = json.get("closed_by");
// When MR is in what Skara considers "closed", it may also have been
// integrated directly in Gitlab. If so, the closed_by field will be
// null, and the merged_by field will be populated instead.
if (closedBy.isNull()) {
closedBy = json.get("merged_by");
}
if (closedBy.isNull()) {
return Optional.empty();
}
return Optional.of(host.parseAuthorObject(closedBy.asObject()));
}
@Override
public URI filesUrl(Hash hash) {
var versionId = request.get("versions").execute().stream()
.filter(version -> hash.hex().equals(version.get("head_commit_sha").asString()))
.map(version -> String.valueOf(version.get("id").asInt()))
.findFirst();
String uri;
if (versionId.isEmpty()) {
uri = "/" + repository.name() + "/-/merge_requests/" + id() + "/diffs?commit_id=" + hash.hex();
} else {
uri = "/" + repository.name() + "/-/merge_requests/" + id() + "/diffs?diff_id=" + versionId.get();
}
return host.getWebUri(uri);
}
@Override
public Optional<ZonedDateTime> lastForcePushTime() {
return Optional.empty();
}
@Override
public Optional<Hash> findIntegratedCommitHash() {
return findIntegratedCommitHash(List.of(repository.forge().currentUser().id()));
}
/**
* For GitLabMergeRequest, a snapshot comparison needs to include the comments
* and reviews, which are both part of the general "notes".
*/
@Override
public Object snapshot() {
if (comparisonSnapshot == null) {
comparisonSnapshot = List.of(json, request.get("notes").execute());
}
return comparisonSnapshot;
}
/**
* Equality for a GitLabMergeRequest is based on the data snapshot retrieved
* when the instance was created.
*/
@Override
public boolean equals(Object o) {
if (this == o) {
return true;
}
if (o == null || getClass() != o.getClass()) {
return false;
}
GitLabMergeRequest that = (GitLabMergeRequest) o;
return json.equals(that.json);
}
    // Hash code derived from the JSON snapshot, consistent with equals().
    @Override
    public int hashCode() {
        return Objects.hash(json);
    }
private String limitBodySize(String body) {
if (body.length() > GITLAB_MR_COMMENT_BODY_MAX_SIZE) {
return body.substring(0, GITLAB_MR_COMMENT_BODY_MAX_SIZE)
+ "...";
}
return body;
}
@Override
public ZonedDateTime lastTouchedTime() {
Set<String> relevantEvents = Set.of(
"marked this merge request as **ready**",
"marked this merge request as **draft**"
);
// Get relevant note timestamps
Stream<ZonedDateTime> noteTimes = request.get("notes")
.execute().stream()
.map(JSONValue::asObject)
.filter(note -> relevantEvents.contains(note.get("body").asString()))
.map(note -> ZonedDateTime.parse(note.get("created_at").asString()));
// Get commit dates
Stream<ZonedDateTime> commitTimes = request.get("commits")
.execute().stream()
.map(JSONValue::asObject)
.map(commit -> ZonedDateTime.parse(commit.get("committed_date").asString()));
// Combine and get latest time
return Stream.concat(noteTimes, commitTimes)
.max(ZonedDateTime::compareTo)
.orElseGet(this::createdAt);
}
}
|
googleapis/google-cloud-java | 36,979 | java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/UpdateSecuritySettingsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/cx/v3beta1/security_settings.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.cx.v3beta1;
/**
*
*
* <pre>
* The request message for
* [SecuritySettingsService.UpdateSecuritySettings][google.cloud.dialogflow.cx.v3beta1.SecuritySettingsService.UpdateSecuritySettings].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest}
*/
public final class UpdateSecuritySettingsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest)
UpdateSecuritySettingsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateSecuritySettingsRequest.newBuilder() to construct.
private UpdateSecuritySettingsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateSecuritySettingsRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateSecuritySettingsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.SecuritySettingsProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateSecuritySettingsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.SecuritySettingsProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateSecuritySettingsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest.class,
com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest.Builder.class);
}
private int bitField0_;
public static final int SECURITY_SETTINGS_FIELD_NUMBER = 1;
private com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings securitySettings_;
/**
*
*
* <pre>
* Required. [SecuritySettings] object that contains values for each of the
* fields to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SecuritySettings security_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the securitySettings field is set.
*/
@java.lang.Override
public boolean hasSecuritySettings() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. [SecuritySettings] object that contains values for each of the
* fields to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SecuritySettings security_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The securitySettings.
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings getSecuritySettings() {
return securitySettings_ == null
? com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings.getDefaultInstance()
: securitySettings_;
}
/**
*
*
* <pre>
* Required. [SecuritySettings] object that contains values for each of the
* fields to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SecuritySettings security_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.SecuritySettingsOrBuilder
getSecuritySettingsOrBuilder() {
return securitySettings_ == null
? com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings.getDefaultInstance()
: securitySettings_;
}
public static final int UPDATE_MASK_FIELD_NUMBER = 2;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getSecuritySettings());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateMask());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSecuritySettings());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest other =
(com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest) obj;
if (hasSecuritySettings() != other.hasSecuritySettings()) return false;
if (hasSecuritySettings()) {
if (!getSecuritySettings().equals(other.getSecuritySettings())) return false;
}
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasSecuritySettings()) {
hash = (37 * hash) + SECURITY_SETTINGS_FIELD_NUMBER;
hash = (53 * hash) + getSecuritySettings().hashCode();
}
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The request message for
* [SecuritySettingsService.UpdateSecuritySettings][google.cloud.dialogflow.cx.v3beta1.SecuritySettingsService.UpdateSecuritySettings].
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest)
com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.cx.v3beta1.SecuritySettingsProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateSecuritySettingsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.cx.v3beta1.SecuritySettingsProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateSecuritySettingsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest.class,
com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest.Builder.class);
}
// Construct using
// com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getSecuritySettingsFieldBuilder();
getUpdateMaskFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
securitySettings_ = null;
if (securitySettingsBuilder_ != null) {
securitySettingsBuilder_.dispose();
securitySettingsBuilder_ = null;
}
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.cx.v3beta1.SecuritySettingsProto
.internal_static_google_cloud_dialogflow_cx_v3beta1_UpdateSecuritySettingsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest
getDefaultInstanceForType() {
return com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest build() {
com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest buildPartial() {
com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest result =
new com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.securitySettings_ =
securitySettingsBuilder_ == null ? securitySettings_ : securitySettingsBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest) {
return mergeFrom(
(com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest other) {
if (other
== com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest
.getDefaultInstance()) return this;
if (other.hasSecuritySettings()) {
mergeSecuritySettings(other.getSecuritySettings());
}
if (other.hasUpdateMask()) {
mergeUpdateMask(other.getUpdateMask());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(
getSecuritySettingsFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings securitySettings_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings,
com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings.Builder,
com.google.cloud.dialogflow.cx.v3beta1.SecuritySettingsOrBuilder>
securitySettingsBuilder_;
/**
*
*
* <pre>
* Required. [SecuritySettings] object that contains values for each of the
* fields to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SecuritySettings security_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the securitySettings field is set.
*/
public boolean hasSecuritySettings() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. [SecuritySettings] object that contains values for each of the
* fields to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SecuritySettings security_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The securitySettings.
*/
public com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings getSecuritySettings() {
if (securitySettingsBuilder_ == null) {
return securitySettings_ == null
? com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings.getDefaultInstance()
: securitySettings_;
} else {
return securitySettingsBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. [SecuritySettings] object that contains values for each of the
* fields to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SecuritySettings security_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setSecuritySettings(
com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings value) {
if (securitySettingsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
securitySettings_ = value;
} else {
securitySettingsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. [SecuritySettings] object that contains values for each of the
* fields to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SecuritySettings security_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setSecuritySettings(
com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings.Builder builderForValue) {
if (securitySettingsBuilder_ == null) {
securitySettings_ = builderForValue.build();
} else {
securitySettingsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. [SecuritySettings] object that contains values for each of the
* fields to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SecuritySettings security_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeSecuritySettings(
com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings value) {
if (securitySettingsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& securitySettings_ != null
&& securitySettings_
!= com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings.getDefaultInstance()) {
getSecuritySettingsBuilder().mergeFrom(value);
} else {
securitySettings_ = value;
}
} else {
securitySettingsBuilder_.mergeFrom(value);
}
if (securitySettings_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. [SecuritySettings] object that contains values for each of the
* fields to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SecuritySettings security_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearSecuritySettings() {
bitField0_ = (bitField0_ & ~0x00000001);
securitySettings_ = null;
if (securitySettingsBuilder_ != null) {
securitySettingsBuilder_.dispose();
securitySettingsBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. [SecuritySettings] object that contains values for each of the
* fields to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SecuritySettings security_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings.Builder
getSecuritySettingsBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getSecuritySettingsFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. [SecuritySettings] object that contains values for each of the
* fields to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SecuritySettings security_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.dialogflow.cx.v3beta1.SecuritySettingsOrBuilder
getSecuritySettingsOrBuilder() {
if (securitySettingsBuilder_ != null) {
return securitySettingsBuilder_.getMessageOrBuilder();
} else {
return securitySettings_ == null
? com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings.getDefaultInstance()
: securitySettings_;
}
}
/**
*
*
* <pre>
* Required. [SecuritySettings] object that contains values for each of the
* fields to update.
* </pre>
*
* <code>
* .google.cloud.dialogflow.cx.v3beta1.SecuritySettings security_settings = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings,
com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings.Builder,
com.google.cloud.dialogflow.cx.v3beta1.SecuritySettingsOrBuilder>
getSecuritySettingsFieldBuilder() {
if (securitySettingsBuilder_ == null) {
securitySettingsBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings,
com.google.cloud.dialogflow.cx.v3beta1.SecuritySettings.Builder,
com.google.cloud.dialogflow.cx.v3beta1.SecuritySettingsOrBuilder>(
getSecuritySettings(), getParentForChildren(), isClean());
securitySettings_ = null;
}
return securitySettingsBuilder_;
}
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. The mask to control which fields get updated. If the mask is not
* present, all fields will be updated.
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
// --- Builder accessors for the `update_mask` field (google.protobuf.FieldMask, field 2). ---
// Generated protobuf code; do not edit by hand. Bit 0x00000002 of `bitField0_` records
// whether update_mask has been explicitly set. Once a nested builder is requested via
// getUpdateMaskBuilder(), `updateMaskBuilder_` takes over and the plain `updateMask_`
// field is nulled out (see getUpdateMaskFieldBuilder() below).

/**
 *
 *
 * <pre>
 * Required. The mask to control which fields get updated. If the mask is not
 * present, all fields will be updated.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    updateMask_ = value;
  } else {
    updateMaskBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. The mask to control which fields get updated. If the mask is not
 * present, all fields will be updated.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
  if (updateMaskBuilder_ == null) {
    updateMask_ = builderForValue.build();
  } else {
    updateMaskBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. The mask to control which fields get updated. If the mask is not
 * present, all fields will be updated.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    // Merge into the existing mask only if one was already set and is not the
    // default instance; otherwise adopt `value` wholesale.
    if (((bitField0_ & 0x00000002) != 0)
        && updateMask_ != null
        && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
      getUpdateMaskBuilder().mergeFrom(value);
    } else {
      updateMask_ = value;
    }
  } else {
    updateMaskBuilder_.mergeFrom(value);
  }
  if (updateMask_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Required. The mask to control which fields get updated. If the mask is not
 * present, all fields will be updated.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder clearUpdateMask() {
  bitField0_ = (bitField0_ & ~0x00000002);
  updateMask_ = null;
  if (updateMaskBuilder_ != null) {
    updateMaskBuilder_.dispose();
    updateMaskBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Required. The mask to control which fields get updated. If the mask is not
 * present, all fields will be updated.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
  // Requesting the nested builder marks the field as set.
  bitField0_ |= 0x00000002;
  onChanged();
  return getUpdateMaskFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Required. The mask to control which fields get updated. If the mask is not
 * present, all fields will be updated.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  if (updateMaskBuilder_ != null) {
    return updateMaskBuilder_.getMessageOrBuilder();
  } else {
    return updateMask_ == null
        ? com.google.protobuf.FieldMask.getDefaultInstance()
        : updateMask_;
  }
}
/**
 *
 *
 * <pre>
 * Required. The mask to control which fields get updated. If the mask is not
 * present, all fields will be updated.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.FieldMask,
        com.google.protobuf.FieldMask.Builder,
        com.google.protobuf.FieldMaskOrBuilder>
    getUpdateMaskFieldBuilder() {
  // Lazily create the nested builder, seeding it with the current message and
  // clearing the plain field so the builder becomes the single source of truth.
  if (updateMaskBuilder_ == null) {
    updateMaskBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>(
            getUpdateMask(), getParentForChildren(), isClean());
    updateMask_ = null;
  }
  return updateMaskBuilder_;
}
// Unknown-field handling is delegated unchanged to the generated superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest)
// Singleton default (all-fields-unset) instance shared by every caller of
// getDefaultInstance()/getDefaultInstanceForType().
private static final com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest
    DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest();
}
public static com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest
    getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. parsePartialFrom attaches the partially-built message to any
// parse exception so callers can still inspect what was decoded before the failure.
private static final com.google.protobuf.Parser<UpdateSecuritySettingsRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateSecuritySettingsRequest>() {
      @java.lang.Override
      public UpdateSecuritySettingsRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so the parser contract only surfaces
          // InvalidProtocolBufferException.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<UpdateSecuritySettingsRequest> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateSecuritySettingsRequest> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.cx.v3beta1.UpdateSecuritySettingsRequest
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ---------------------------------------------------------------------------
// File boundary (concatenation artifact). The content below is a separate
// generated file: googleapis/google-cloud-java ::
// java-retail/grpc-google-cloud-retail-v2/src/main/java/com/google/cloud/retail/v2/ControlServiceGrpc.java
// ---------------------------------------------------------------------------
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.retail.v2;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service for modifying Control.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/retail/v2/control_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class ControlServiceGrpc {
// Private constructor: this class is a static holder for descriptors and stub
// factories and is never instantiated.
private ControlServiceGrpc() {}
// Fully-qualified proto service name; used when building method descriptors below.
public static final java.lang.String SERVICE_NAME = "google.cloud.retail.v2.ControlService";
// Static method descriptors that strictly reflect the proto.
// --- Cached MethodDescriptors, one per RPC. ---
// Each accessor uses double-checked locking on the ControlServiceGrpc class monitor;
// the backing fields are volatile so the unsynchronized first read is safe. All five
// RPCs are UNARY and marshalled via the generated protobuf messages.
private static volatile io.grpc.MethodDescriptor<
        com.google.cloud.retail.v2.CreateControlRequest, com.google.cloud.retail.v2.Control>
    getCreateControlMethod;
@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "CreateControl",
    requestType = com.google.cloud.retail.v2.CreateControlRequest.class,
    responseType = com.google.cloud.retail.v2.Control.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
        com.google.cloud.retail.v2.CreateControlRequest, com.google.cloud.retail.v2.Control>
    getCreateControlMethod() {
  io.grpc.MethodDescriptor<
          com.google.cloud.retail.v2.CreateControlRequest, com.google.cloud.retail.v2.Control>
      getCreateControlMethod;
  if ((getCreateControlMethod = ControlServiceGrpc.getCreateControlMethod) == null) {
    synchronized (ControlServiceGrpc.class) {
      if ((getCreateControlMethod = ControlServiceGrpc.getCreateControlMethod) == null) {
        ControlServiceGrpc.getCreateControlMethod =
            getCreateControlMethod =
                io.grpc.MethodDescriptor
                    .<com.google.cloud.retail.v2.CreateControlRequest,
                        com.google.cloud.retail.v2.Control>
                        newBuilder()
                    .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                    .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateControl"))
                    .setSampledToLocalTracing(true)
                    .setRequestMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.retail.v2.CreateControlRequest.getDefaultInstance()))
                    .setResponseMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.retail.v2.Control.getDefaultInstance()))
                    .setSchemaDescriptor(
                        new ControlServiceMethodDescriptorSupplier("CreateControl"))
                    .build();
      }
    }
  }
  return getCreateControlMethod;
}
private static volatile io.grpc.MethodDescriptor<
        com.google.cloud.retail.v2.DeleteControlRequest, com.google.protobuf.Empty>
    getDeleteControlMethod;
@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "DeleteControl",
    requestType = com.google.cloud.retail.v2.DeleteControlRequest.class,
    responseType = com.google.protobuf.Empty.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
        com.google.cloud.retail.v2.DeleteControlRequest, com.google.protobuf.Empty>
    getDeleteControlMethod() {
  io.grpc.MethodDescriptor<
          com.google.cloud.retail.v2.DeleteControlRequest, com.google.protobuf.Empty>
      getDeleteControlMethod;
  if ((getDeleteControlMethod = ControlServiceGrpc.getDeleteControlMethod) == null) {
    synchronized (ControlServiceGrpc.class) {
      if ((getDeleteControlMethod = ControlServiceGrpc.getDeleteControlMethod) == null) {
        ControlServiceGrpc.getDeleteControlMethod =
            getDeleteControlMethod =
                io.grpc.MethodDescriptor
                    .<com.google.cloud.retail.v2.DeleteControlRequest, com.google.protobuf.Empty>
                        newBuilder()
                    .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                    .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteControl"))
                    .setSampledToLocalTracing(true)
                    .setRequestMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.retail.v2.DeleteControlRequest.getDefaultInstance()))
                    .setResponseMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.protobuf.Empty.getDefaultInstance()))
                    .setSchemaDescriptor(
                        new ControlServiceMethodDescriptorSupplier("DeleteControl"))
                    .build();
      }
    }
  }
  return getDeleteControlMethod;
}
private static volatile io.grpc.MethodDescriptor<
        com.google.cloud.retail.v2.UpdateControlRequest, com.google.cloud.retail.v2.Control>
    getUpdateControlMethod;
@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "UpdateControl",
    requestType = com.google.cloud.retail.v2.UpdateControlRequest.class,
    responseType = com.google.cloud.retail.v2.Control.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
        com.google.cloud.retail.v2.UpdateControlRequest, com.google.cloud.retail.v2.Control>
    getUpdateControlMethod() {
  io.grpc.MethodDescriptor<
          com.google.cloud.retail.v2.UpdateControlRequest, com.google.cloud.retail.v2.Control>
      getUpdateControlMethod;
  if ((getUpdateControlMethod = ControlServiceGrpc.getUpdateControlMethod) == null) {
    synchronized (ControlServiceGrpc.class) {
      if ((getUpdateControlMethod = ControlServiceGrpc.getUpdateControlMethod) == null) {
        ControlServiceGrpc.getUpdateControlMethod =
            getUpdateControlMethod =
                io.grpc.MethodDescriptor
                    .<com.google.cloud.retail.v2.UpdateControlRequest,
                        com.google.cloud.retail.v2.Control>
                        newBuilder()
                    .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                    .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateControl"))
                    .setSampledToLocalTracing(true)
                    .setRequestMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.retail.v2.UpdateControlRequest.getDefaultInstance()))
                    .setResponseMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.retail.v2.Control.getDefaultInstance()))
                    .setSchemaDescriptor(
                        new ControlServiceMethodDescriptorSupplier("UpdateControl"))
                    .build();
      }
    }
  }
  return getUpdateControlMethod;
}
private static volatile io.grpc.MethodDescriptor<
        com.google.cloud.retail.v2.GetControlRequest, com.google.cloud.retail.v2.Control>
    getGetControlMethod;
@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "GetControl",
    requestType = com.google.cloud.retail.v2.GetControlRequest.class,
    responseType = com.google.cloud.retail.v2.Control.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
        com.google.cloud.retail.v2.GetControlRequest, com.google.cloud.retail.v2.Control>
    getGetControlMethod() {
  io.grpc.MethodDescriptor<
          com.google.cloud.retail.v2.GetControlRequest, com.google.cloud.retail.v2.Control>
      getGetControlMethod;
  if ((getGetControlMethod = ControlServiceGrpc.getGetControlMethod) == null) {
    synchronized (ControlServiceGrpc.class) {
      if ((getGetControlMethod = ControlServiceGrpc.getGetControlMethod) == null) {
        ControlServiceGrpc.getGetControlMethod =
            getGetControlMethod =
                io.grpc.MethodDescriptor
                    .<com.google.cloud.retail.v2.GetControlRequest,
                        com.google.cloud.retail.v2.Control>
                        newBuilder()
                    .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                    .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetControl"))
                    .setSampledToLocalTracing(true)
                    .setRequestMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.retail.v2.GetControlRequest.getDefaultInstance()))
                    .setResponseMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.retail.v2.Control.getDefaultInstance()))
                    .setSchemaDescriptor(new ControlServiceMethodDescriptorSupplier("GetControl"))
                    .build();
      }
    }
  }
  return getGetControlMethod;
}
private static volatile io.grpc.MethodDescriptor<
        com.google.cloud.retail.v2.ListControlsRequest,
        com.google.cloud.retail.v2.ListControlsResponse>
    getListControlsMethod;
@io.grpc.stub.annotations.RpcMethod(
    fullMethodName = SERVICE_NAME + '/' + "ListControls",
    requestType = com.google.cloud.retail.v2.ListControlsRequest.class,
    responseType = com.google.cloud.retail.v2.ListControlsResponse.class,
    methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
public static io.grpc.MethodDescriptor<
        com.google.cloud.retail.v2.ListControlsRequest,
        com.google.cloud.retail.v2.ListControlsResponse>
    getListControlsMethod() {
  io.grpc.MethodDescriptor<
          com.google.cloud.retail.v2.ListControlsRequest,
          com.google.cloud.retail.v2.ListControlsResponse>
      getListControlsMethod;
  if ((getListControlsMethod = ControlServiceGrpc.getListControlsMethod) == null) {
    synchronized (ControlServiceGrpc.class) {
      if ((getListControlsMethod = ControlServiceGrpc.getListControlsMethod) == null) {
        ControlServiceGrpc.getListControlsMethod =
            getListControlsMethod =
                io.grpc.MethodDescriptor
                    .<com.google.cloud.retail.v2.ListControlsRequest,
                        com.google.cloud.retail.v2.ListControlsResponse>
                        newBuilder()
                    .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                    .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListControls"))
                    .setSampledToLocalTracing(true)
                    .setRequestMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.retail.v2.ListControlsRequest.getDefaultInstance()))
                    .setResponseMarshaller(
                        io.grpc.protobuf.ProtoUtils.marshaller(
                            com.google.cloud.retail.v2.ListControlsResponse.getDefaultInstance()))
                    .setSchemaDescriptor(
                        new ControlServiceMethodDescriptorSupplier("ListControls"))
                    .build();
      }
    }
  }
  return getListControlsMethod;
}
// --- Stub factories. Each wires a StubFactory lambda-equivalent into the
// corresponding AbstractStub subtype; the channel's CallOptions are defaulted. ---
/** Creates a new async stub that supports all call types for the service */
public static ControlServiceStub newStub(io.grpc.Channel channel) {
  io.grpc.stub.AbstractStub.StubFactory<ControlServiceStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<ControlServiceStub>() {
        @java.lang.Override
        public ControlServiceStub newStub(
            io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new ControlServiceStub(channel, callOptions);
        }
      };
  return ControlServiceStub.newStub(factory, channel);
}
/** Creates a new blocking-style stub that supports all types of calls on the service */
public static ControlServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
  io.grpc.stub.AbstractStub.StubFactory<ControlServiceBlockingV2Stub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<ControlServiceBlockingV2Stub>() {
        @java.lang.Override
        public ControlServiceBlockingV2Stub newStub(
            io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new ControlServiceBlockingV2Stub(channel, callOptions);
        }
      };
  return ControlServiceBlockingV2Stub.newStub(factory, channel);
}
/**
 * Creates a new blocking-style stub that supports unary and streaming output calls on the service
 */
public static ControlServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
  io.grpc.stub.AbstractStub.StubFactory<ControlServiceBlockingStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<ControlServiceBlockingStub>() {
        @java.lang.Override
        public ControlServiceBlockingStub newStub(
            io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new ControlServiceBlockingStub(channel, callOptions);
        }
      };
  return ControlServiceBlockingStub.newStub(factory, channel);
}
/** Creates a new ListenableFuture-style stub that supports unary calls on the service */
public static ControlServiceFutureStub newFutureStub(io.grpc.Channel channel) {
  io.grpc.stub.AbstractStub.StubFactory<ControlServiceFutureStub> factory =
      new io.grpc.stub.AbstractStub.StubFactory<ControlServiceFutureStub>() {
        @java.lang.Override
        public ControlServiceFutureStub newStub(
            io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
          return new ControlServiceFutureStub(channel, callOptions);
        }
      };
  return ControlServiceFutureStub.newStub(factory, channel);
}
/**
 *
 *
 * <pre>
 * Service for modifying Control.
 * </pre>
 */
// Server-side contract. Every method has a default body that replies
// UNIMPLEMENTED, so implementations only need to override the RPCs they support.
public interface AsyncService {
  /**
   *
   *
   * <pre>
   * Creates a Control.
   * If the [Control][google.cloud.retail.v2.Control] to create already exists,
   * an ALREADY_EXISTS error is returned.
   * </pre>
   */
  default void createControl(
      com.google.cloud.retail.v2.CreateControlRequest request,
      io.grpc.stub.StreamObserver<com.google.cloud.retail.v2.Control> responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
        getCreateControlMethod(), responseObserver);
  }
  /**
   *
   *
   * <pre>
   * Deletes a Control.
   * If the [Control][google.cloud.retail.v2.Control] to delete does not exist,
   * a NOT_FOUND error is returned.
   * </pre>
   */
  default void deleteControl(
      com.google.cloud.retail.v2.DeleteControlRequest request,
      io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
        getDeleteControlMethod(), responseObserver);
  }
  /**
   *
   *
   * <pre>
   * Updates a Control.
   * [Control][google.cloud.retail.v2.Control] cannot be set to a different
   * oneof field, if so an INVALID_ARGUMENT is returned. If the
   * [Control][google.cloud.retail.v2.Control] to update does not exist, a
   * NOT_FOUND error is returned.
   * </pre>
   */
  default void updateControl(
      com.google.cloud.retail.v2.UpdateControlRequest request,
      io.grpc.stub.StreamObserver<com.google.cloud.retail.v2.Control> responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
        getUpdateControlMethod(), responseObserver);
  }
  /**
   *
   *
   * <pre>
   * Gets a Control.
   * </pre>
   */
  default void getControl(
      com.google.cloud.retail.v2.GetControlRequest request,
      io.grpc.stub.StreamObserver<com.google.cloud.retail.v2.Control> responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetControlMethod(), responseObserver);
  }
  /**
   *
   *
   * <pre>
   * Lists all Controls by their parent
   * [Catalog][google.cloud.retail.v2.Catalog].
   * </pre>
   */
  default void listControls(
      com.google.cloud.retail.v2.ListControlsRequest request,
      io.grpc.stub.StreamObserver<com.google.cloud.retail.v2.ListControlsResponse>
          responseObserver) {
    io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
        getListControlsMethod(), responseObserver);
  }
}
/**
 * Base class for the server implementation of the service ControlService.
 *
 * <pre>
 * Service for modifying Control.
 * </pre>
 */
// Convenience base that binds AsyncService's (possibly overridden) methods into a
// ServerServiceDefinition via the static bindService(AsyncService) helper below.
public abstract static class ControlServiceImplBase
    implements io.grpc.BindableService, AsyncService {
  @java.lang.Override
  public final io.grpc.ServerServiceDefinition bindService() {
    return ControlServiceGrpc.bindService(this);
  }
}
/**
 * A stub to allow clients to do asynchronous rpc calls to service ControlService.
 *
 * <pre>
 * Service for modifying Control.
 * </pre>
 */
// Async client: each method issues a non-blocking unary call and delivers the
// result through the supplied StreamObserver.
public static final class ControlServiceStub
    extends io.grpc.stub.AbstractAsyncStub<ControlServiceStub> {
  private ControlServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }
  @java.lang.Override
  protected ControlServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    return new ControlServiceStub(channel, callOptions);
  }
  /**
   *
   *
   * <pre>
   * Creates a Control.
   * If the [Control][google.cloud.retail.v2.Control] to create already exists,
   * an ALREADY_EXISTS error is returned.
   * </pre>
   */
  public void createControl(
      com.google.cloud.retail.v2.CreateControlRequest request,
      io.grpc.stub.StreamObserver<com.google.cloud.retail.v2.Control> responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getCreateControlMethod(), getCallOptions()),
        request,
        responseObserver);
  }
  /**
   *
   *
   * <pre>
   * Deletes a Control.
   * If the [Control][google.cloud.retail.v2.Control] to delete does not exist,
   * a NOT_FOUND error is returned.
   * </pre>
   */
  public void deleteControl(
      com.google.cloud.retail.v2.DeleteControlRequest request,
      io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getDeleteControlMethod(), getCallOptions()),
        request,
        responseObserver);
  }
  /**
   *
   *
   * <pre>
   * Updates a Control.
   * [Control][google.cloud.retail.v2.Control] cannot be set to a different
   * oneof field, if so an INVALID_ARGUMENT is returned. If the
   * [Control][google.cloud.retail.v2.Control] to update does not exist, a
   * NOT_FOUND error is returned.
   * </pre>
   */
  public void updateControl(
      com.google.cloud.retail.v2.UpdateControlRequest request,
      io.grpc.stub.StreamObserver<com.google.cloud.retail.v2.Control> responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getUpdateControlMethod(), getCallOptions()),
        request,
        responseObserver);
  }
  /**
   *
   *
   * <pre>
   * Gets a Control.
   * </pre>
   */
  public void getControl(
      com.google.cloud.retail.v2.GetControlRequest request,
      io.grpc.stub.StreamObserver<com.google.cloud.retail.v2.Control> responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getGetControlMethod(), getCallOptions()), request, responseObserver);
  }
  /**
   *
   *
   * <pre>
   * Lists all Controls by their parent
   * [Catalog][google.cloud.retail.v2.Catalog].
   * </pre>
   */
  public void listControls(
      com.google.cloud.retail.v2.ListControlsRequest request,
      io.grpc.stub.StreamObserver<com.google.cloud.retail.v2.ListControlsResponse>
          responseObserver) {
    io.grpc.stub.ClientCalls.asyncUnaryCall(
        getChannel().newCall(getListControlsMethod(), getCallOptions()),
        request,
        responseObserver);
  }
}
/**
 * A stub to allow clients to do synchronous rpc calls to service ControlService.
 *
 * <pre>
 * Service for modifying Control.
 * </pre>
 */
// Blocking V2 client: every call blocks the calling thread until the unary
// response (or an error) arrives.
public static final class ControlServiceBlockingV2Stub
    extends io.grpc.stub.AbstractBlockingStub<ControlServiceBlockingV2Stub> {
  private ControlServiceBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }
  @java.lang.Override
  protected ControlServiceBlockingV2Stub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    return new ControlServiceBlockingV2Stub(channel, callOptions);
  }
  /**
   *
   *
   * <pre>
   * Creates a Control.
   * If the [Control][google.cloud.retail.v2.Control] to create already exists,
   * an ALREADY_EXISTS error is returned.
   * </pre>
   */
  public com.google.cloud.retail.v2.Control createControl(
      com.google.cloud.retail.v2.CreateControlRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getCreateControlMethod(), getCallOptions(), request);
  }
  /**
   *
   *
   * <pre>
   * Deletes a Control.
   * If the [Control][google.cloud.retail.v2.Control] to delete does not exist,
   * a NOT_FOUND error is returned.
   * </pre>
   */
  public com.google.protobuf.Empty deleteControl(
      com.google.cloud.retail.v2.DeleteControlRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getDeleteControlMethod(), getCallOptions(), request);
  }
  /**
   *
   *
   * <pre>
   * Updates a Control.
   * [Control][google.cloud.retail.v2.Control] cannot be set to a different
   * oneof field, if so an INVALID_ARGUMENT is returned. If the
   * [Control][google.cloud.retail.v2.Control] to update does not exist, a
   * NOT_FOUND error is returned.
   * </pre>
   */
  public com.google.cloud.retail.v2.Control updateControl(
      com.google.cloud.retail.v2.UpdateControlRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getUpdateControlMethod(), getCallOptions(), request);
  }
  /**
   *
   *
   * <pre>
   * Gets a Control.
   * </pre>
   */
  public com.google.cloud.retail.v2.Control getControl(
      com.google.cloud.retail.v2.GetControlRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getGetControlMethod(), getCallOptions(), request);
  }
  /**
   *
   *
   * <pre>
   * Lists all Controls by their parent
   * [Catalog][google.cloud.retail.v2.Catalog].
   * </pre>
   */
  public com.google.cloud.retail.v2.ListControlsResponse listControls(
      com.google.cloud.retail.v2.ListControlsRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getListControlsMethod(), getCallOptions(), request);
  }
}
/**
 * A stub to allow clients to do limited synchronous rpc calls to service ControlService.
 *
 * <pre>
 * Service for modifying Control.
 * </pre>
 */
// Legacy blocking client; method bodies are identical to the V2 blocking stub
// above since this service only exposes unary RPCs.
public static final class ControlServiceBlockingStub
    extends io.grpc.stub.AbstractBlockingStub<ControlServiceBlockingStub> {
  private ControlServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }
  @java.lang.Override
  protected ControlServiceBlockingStub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    return new ControlServiceBlockingStub(channel, callOptions);
  }
  /**
   *
   *
   * <pre>
   * Creates a Control.
   * If the [Control][google.cloud.retail.v2.Control] to create already exists,
   * an ALREADY_EXISTS error is returned.
   * </pre>
   */
  public com.google.cloud.retail.v2.Control createControl(
      com.google.cloud.retail.v2.CreateControlRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getCreateControlMethod(), getCallOptions(), request);
  }
  /**
   *
   *
   * <pre>
   * Deletes a Control.
   * If the [Control][google.cloud.retail.v2.Control] to delete does not exist,
   * a NOT_FOUND error is returned.
   * </pre>
   */
  public com.google.protobuf.Empty deleteControl(
      com.google.cloud.retail.v2.DeleteControlRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getDeleteControlMethod(), getCallOptions(), request);
  }
  /**
   *
   *
   * <pre>
   * Updates a Control.
   * [Control][google.cloud.retail.v2.Control] cannot be set to a different
   * oneof field, if so an INVALID_ARGUMENT is returned. If the
   * [Control][google.cloud.retail.v2.Control] to update does not exist, a
   * NOT_FOUND error is returned.
   * </pre>
   */
  public com.google.cloud.retail.v2.Control updateControl(
      com.google.cloud.retail.v2.UpdateControlRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getUpdateControlMethod(), getCallOptions(), request);
  }
  /**
   *
   *
   * <pre>
   * Gets a Control.
   * </pre>
   */
  public com.google.cloud.retail.v2.Control getControl(
      com.google.cloud.retail.v2.GetControlRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getGetControlMethod(), getCallOptions(), request);
  }
  /**
   *
   *
   * <pre>
   * Lists all Controls by their parent
   * [Catalog][google.cloud.retail.v2.Catalog].
   * </pre>
   */
  public com.google.cloud.retail.v2.ListControlsResponse listControls(
      com.google.cloud.retail.v2.ListControlsRequest request) {
    return io.grpc.stub.ClientCalls.blockingUnaryCall(
        getChannel(), getListControlsMethod(), getCallOptions(), request);
  }
}
/**
 * A stub to allow clients to do ListenableFuture-style rpc calls to service ControlService.
 *
 * <pre>
 * Service for modifying Control.
 * </pre>
 */
// Future-returning client: each unary call returns immediately with a
// ListenableFuture that completes with the response.
public static final class ControlServiceFutureStub
    extends io.grpc.stub.AbstractFutureStub<ControlServiceFutureStub> {
  private ControlServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    super(channel, callOptions);
  }
  @java.lang.Override
  protected ControlServiceFutureStub build(
      io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
    return new ControlServiceFutureStub(channel, callOptions);
  }
  /**
   *
   *
   * <pre>
   * Creates a Control.
   * If the [Control][google.cloud.retail.v2.Control] to create already exists,
   * an ALREADY_EXISTS error is returned.
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.cloud.retail.v2.Control>
      createControl(com.google.cloud.retail.v2.CreateControlRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getCreateControlMethod(), getCallOptions()), request);
  }
  /**
   *
   *
   * <pre>
   * Deletes a Control.
   * If the [Control][google.cloud.retail.v2.Control] to delete does not exist,
   * a NOT_FOUND error is returned.
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty>
      deleteControl(com.google.cloud.retail.v2.DeleteControlRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getDeleteControlMethod(), getCallOptions()), request);
  }
  /**
   *
   *
   * <pre>
   * Updates a Control.
   * [Control][google.cloud.retail.v2.Control] cannot be set to a different
   * oneof field, if so an INVALID_ARGUMENT is returned. If the
   * [Control][google.cloud.retail.v2.Control] to update does not exist, a
   * NOT_FOUND error is returned.
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.cloud.retail.v2.Control>
      updateControl(com.google.cloud.retail.v2.UpdateControlRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getUpdateControlMethod(), getCallOptions()), request);
  }
  /**
   *
   *
   * <pre>
   * Gets a Control.
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<com.google.cloud.retail.v2.Control>
      getControl(com.google.cloud.retail.v2.GetControlRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getGetControlMethod(), getCallOptions()), request);
  }
  /**
   *
   *
   * <pre>
   * Lists all Controls by their parent
   * [Catalog][google.cloud.retail.v2.Catalog].
   * </pre>
   */
  public com.google.common.util.concurrent.ListenableFuture<
          com.google.cloud.retail.v2.ListControlsResponse>
      listControls(com.google.cloud.retail.v2.ListControlsRequest request) {
    return io.grpc.stub.ClientCalls.futureUnaryCall(
        getChannel().newCall(getListControlsMethod(), getCallOptions()), request);
  }
}
private static final int METHODID_CREATE_CONTROL = 0;
private static final int METHODID_DELETE_CONTROL = 1;
private static final int METHODID_UPDATE_CONTROL = 2;
private static final int METHODID_GET_CONTROL = 3;
private static final int METHODID_LIST_CONTROLS = 4;
private static final class MethodHandlers<Req, Resp>
implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
private final AsyncService serviceImpl;
private final int methodId;
MethodHandlers(AsyncService serviceImpl, int methodId) {
this.serviceImpl = serviceImpl;
this.methodId = methodId;
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
case METHODID_CREATE_CONTROL:
serviceImpl.createControl(
(com.google.cloud.retail.v2.CreateControlRequest) request,
(io.grpc.stub.StreamObserver<com.google.cloud.retail.v2.Control>) responseObserver);
break;
case METHODID_DELETE_CONTROL:
serviceImpl.deleteControl(
(com.google.cloud.retail.v2.DeleteControlRequest) request,
(io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
break;
case METHODID_UPDATE_CONTROL:
serviceImpl.updateControl(
(com.google.cloud.retail.v2.UpdateControlRequest) request,
(io.grpc.stub.StreamObserver<com.google.cloud.retail.v2.Control>) responseObserver);
break;
case METHODID_GET_CONTROL:
serviceImpl.getControl(
(com.google.cloud.retail.v2.GetControlRequest) request,
(io.grpc.stub.StreamObserver<com.google.cloud.retail.v2.Control>) responseObserver);
break;
case METHODID_LIST_CONTROLS:
serviceImpl.listControls(
(com.google.cloud.retail.v2.ListControlsRequest) request,
(io.grpc.stub.StreamObserver<com.google.cloud.retail.v2.ListControlsResponse>)
responseObserver);
break;
default:
throw new AssertionError();
}
}
@java.lang.Override
@java.lang.SuppressWarnings("unchecked")
public io.grpc.stub.StreamObserver<Req> invoke(
io.grpc.stub.StreamObserver<Resp> responseObserver) {
switch (methodId) {
default:
throw new AssertionError();
}
}
}
  /**
   * Builds the {@link io.grpc.ServerServiceDefinition} for ControlService, binding each
   * unary method descriptor to a {@link MethodHandlers} instance that dispatches to
   * {@code service}.
   */
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getCreateControlMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.retail.v2.CreateControlRequest,
                    com.google.cloud.retail.v2.Control>(service, METHODID_CREATE_CONTROL)))
        .addMethod(
            getDeleteControlMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.retail.v2.DeleteControlRequest, com.google.protobuf.Empty>(
                    service, METHODID_DELETE_CONTROL)))
        .addMethod(
            getUpdateControlMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.retail.v2.UpdateControlRequest,
                    com.google.cloud.retail.v2.Control>(service, METHODID_UPDATE_CONTROL)))
        .addMethod(
            getGetControlMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.retail.v2.GetControlRequest,
                    com.google.cloud.retail.v2.Control>(service, METHODID_GET_CONTROL)))
        .addMethod(
            getListControlsMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.retail.v2.ListControlsRequest,
                    com.google.cloud.retail.v2.ListControlsResponse>(
                    service, METHODID_LIST_CONTROLS)))
        .build();
  }
  /**
   * Base supplier that exposes the proto file/service descriptors for ControlService so
   * reflection and debug tooling can resolve schema information.
   */
  private abstract static class ControlServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    ControlServiceBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.retail.v2.ControlServiceProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("ControlService");
    }
  }

  /** File-level descriptor supplier; attached to the service descriptor as schema metadata. */
  private static final class ControlServiceFileDescriptorSupplier
      extends ControlServiceBaseDescriptorSupplier {
    ControlServiceFileDescriptorSupplier() {}
  }

  /** Method-level descriptor supplier; resolves a single method by its proto name. */
  private static final class ControlServiceMethodDescriptorSupplier
      extends ControlServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;

    ControlServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }
  // Lazily-built service descriptor; the volatile field plus the synchronized re-check
  // below implement double-checked locking so the descriptor is constructed at most once.
  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  /**
   * Returns the {@link io.grpc.ServiceDescriptor} for ControlService, building and caching
   * it (with all five method descriptors and the proto schema supplier) on first use.
   */
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (ControlServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new ControlServiceFileDescriptorSupplier())
                      .addMethod(getCreateControlMethod())
                      .addMethod(getDeleteControlMethod())
                      .addMethod(getUpdateControlMethod())
                      .addMethod(getGetControlMethod())
                      .addMethod(getListControlsMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.eventarc.publishing.v1;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Eventarc processes events generated by an event provider and delivers them to
* a subscriber.
* An event provider is a software-as-a-service (SaaS) system or
* product that can generate and deliver events through Eventarc.
* A third-party event provider is an event provider from outside of Google.
* A partner is a third-party event provider that is integrated with Eventarc.
* A subscriber is a Google Cloud customer interested in receiving events.
* Channel is a first-class Eventarc resource that is created and managed
* by the subscriber in their Google Cloud project. A Channel represents a
* subscriber's intent to receive events from an event provider. A Channel is
* associated with exactly one event provider.
* ChannelConnection is a first-class Eventarc resource that
* is created and managed by the partner in their Google Cloud project. A
* ChannelConnection represents a connection between a partner and a
* subscriber's Channel. A ChannelConnection has a one-to-one mapping with a
* Channel.
* Bus is a first-class Eventarc resource that is created and managed in a
* Google Cloud project. A Bus provides a discoverable endpoint for events and
* is a router that receives all events published by event providers and
* delivers them to zero or more subscribers.
* Publisher allows an event provider to publish events to Eventarc.
* </pre>
*/
@javax.annotation.Generated(
value = "by gRPC proto compiler",
comments = "Source: google/cloud/eventarc/publishing/v1/publisher.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class PublisherGrpc {
  // Static holder class for Publisher stubs/descriptors; never instantiated.
  private PublisherGrpc() {}

  /** Fully-qualified proto service name, used when generating full method names. */
  public static final java.lang.String SERVICE_NAME =
      "google.cloud.eventarc.publishing.v1.Publisher";
  // Static method descriptors that strictly reflect the proto.
  // Lazily initialized via double-checked locking on the volatile field.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsRequest,
          com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsResponse>
      getPublishChannelConnectionEventsMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "PublishChannelConnectionEvents",
      requestType =
          com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsRequest.class,
      responseType =
          com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  /**
   * Returns the unary method descriptor for {@code PublishChannelConnectionEvents},
   * building and caching it on first use.
   */
  public static io.grpc.MethodDescriptor<
          com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsRequest,
          com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsResponse>
      getPublishChannelConnectionEventsMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsRequest,
            com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsResponse>
        getPublishChannelConnectionEventsMethod;
    if ((getPublishChannelConnectionEventsMethod =
            PublisherGrpc.getPublishChannelConnectionEventsMethod)
        == null) {
      synchronized (PublisherGrpc.class) {
        if ((getPublishChannelConnectionEventsMethod =
                PublisherGrpc.getPublishChannelConnectionEventsMethod)
            == null) {
          PublisherGrpc.getPublishChannelConnectionEventsMethod =
              getPublishChannelConnectionEventsMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.eventarc.publishing.v1
                              .PublishChannelConnectionEventsRequest,
                          com.google.cloud.eventarc.publishing.v1
                              .PublishChannelConnectionEventsResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(
                          generateFullMethodName(SERVICE_NAME, "PublishChannelConnectionEvents"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.eventarc.publishing.v1
                                  .PublishChannelConnectionEventsRequest.getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.eventarc.publishing.v1
                                  .PublishChannelConnectionEventsResponse.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new PublisherMethodDescriptorSupplier("PublishChannelConnectionEvents"))
                      .build();
        }
      }
    }
    return getPublishChannelConnectionEventsMethod;
  }
  // Lazily initialized via double-checked locking on the volatile field.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.eventarc.publishing.v1.PublishEventsRequest,
          com.google.cloud.eventarc.publishing.v1.PublishEventsResponse>
      getPublishEventsMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "PublishEvents",
      requestType = com.google.cloud.eventarc.publishing.v1.PublishEventsRequest.class,
      responseType = com.google.cloud.eventarc.publishing.v1.PublishEventsResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  /** Returns the unary method descriptor for {@code PublishEvents}, cached on first use. */
  public static io.grpc.MethodDescriptor<
          com.google.cloud.eventarc.publishing.v1.PublishEventsRequest,
          com.google.cloud.eventarc.publishing.v1.PublishEventsResponse>
      getPublishEventsMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.eventarc.publishing.v1.PublishEventsRequest,
            com.google.cloud.eventarc.publishing.v1.PublishEventsResponse>
        getPublishEventsMethod;
    if ((getPublishEventsMethod = PublisherGrpc.getPublishEventsMethod) == null) {
      synchronized (PublisherGrpc.class) {
        if ((getPublishEventsMethod = PublisherGrpc.getPublishEventsMethod) == null) {
          PublisherGrpc.getPublishEventsMethod =
              getPublishEventsMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.eventarc.publishing.v1.PublishEventsRequest,
                          com.google.cloud.eventarc.publishing.v1.PublishEventsResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "PublishEvents"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.eventarc.publishing.v1.PublishEventsRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.eventarc.publishing.v1.PublishEventsResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(new PublisherMethodDescriptorSupplier("PublishEvents"))
                      .build();
        }
      }
    }
    return getPublishEventsMethod;
  }
  // Lazily initialized via double-checked locking on the volatile field.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.eventarc.publishing.v1.PublishRequest,
          com.google.cloud.eventarc.publishing.v1.PublishResponse>
      getPublishMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "Publish",
      requestType = com.google.cloud.eventarc.publishing.v1.PublishRequest.class,
      responseType = com.google.cloud.eventarc.publishing.v1.PublishResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  /** Returns the unary method descriptor for {@code Publish}, cached on first use. */
  public static io.grpc.MethodDescriptor<
          com.google.cloud.eventarc.publishing.v1.PublishRequest,
          com.google.cloud.eventarc.publishing.v1.PublishResponse>
      getPublishMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.eventarc.publishing.v1.PublishRequest,
            com.google.cloud.eventarc.publishing.v1.PublishResponse>
        getPublishMethod;
    if ((getPublishMethod = PublisherGrpc.getPublishMethod) == null) {
      synchronized (PublisherGrpc.class) {
        if ((getPublishMethod = PublisherGrpc.getPublishMethod) == null) {
          PublisherGrpc.getPublishMethod =
              getPublishMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.eventarc.publishing.v1.PublishRequest,
                          com.google.cloud.eventarc.publishing.v1.PublishResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "Publish"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.eventarc.publishing.v1.PublishRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.eventarc.publishing.v1.PublishResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(new PublisherMethodDescriptorSupplier("Publish"))
                      .build();
        }
      }
    }
    return getPublishMethod;
  }
  /** Creates a new async stub that supports all call types for the service. */
  public static PublisherStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<PublisherStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<PublisherStub>() {
          @java.lang.Override
          public PublisherStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new PublisherStub(channel, callOptions);
          }
        };
    return PublisherStub.newStub(factory, channel);
  }

  /** Creates a new blocking-style stub that supports all types of calls on the service. */
  public static PublisherBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<PublisherBlockingV2Stub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<PublisherBlockingV2Stub>() {
          @java.lang.Override
          public PublisherBlockingV2Stub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new PublisherBlockingV2Stub(channel, callOptions);
          }
        };
    return PublisherBlockingV2Stub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the
   * service.
   */
  public static PublisherBlockingStub newBlockingStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<PublisherBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<PublisherBlockingStub>() {
          @java.lang.Override
          public PublisherBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new PublisherBlockingStub(channel, callOptions);
          }
        };
    return PublisherBlockingStub.newStub(factory, channel);
  }

  /** Creates a new ListenableFuture-style stub that supports unary calls on the service. */
  public static PublisherFutureStub newFutureStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<PublisherFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<PublisherFutureStub>() {
          @java.lang.Override
          public PublisherFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new PublisherFutureStub(channel, callOptions);
          }
        };
    return PublisherFutureStub.newStub(factory, channel);
  }
/**
*
*
* <pre>
* Eventarc processes events generated by an event provider and delivers them to
* a subscriber.
* An event provider is a software-as-a-service (SaaS) system or
* product that can generate and deliver events through Eventarc.
* A third-party event provider is an event provider from outside of Google.
* A partner is a third-party event provider that is integrated with Eventarc.
* A subscriber is a Google Cloud customer interested in receiving events.
* Channel is a first-class Eventarc resource that is created and managed
* by the subscriber in their Google Cloud project. A Channel represents a
* subscriber's intent to receive events from an event provider. A Channel is
* associated with exactly one event provider.
* ChannelConnection is a first-class Eventarc resource that
* is created and managed by the partner in their Google Cloud project. A
* ChannelConnection represents a connection between a partner and a
* subscriber's Channel. A ChannelConnection has a one-to-one mapping with a
* Channel.
* Bus is a first-class Eventarc resource that is created and managed in a
* Google Cloud project. A Bus provides a discoverable endpoint for events and
* is a router that receives all events published by event providers and
* delivers them to zero or more subscribers.
* Publisher allows an event provider to publish events to Eventarc.
* </pre>
*/
  // Default implementations respond with UNIMPLEMENTED; servers override the methods
  // they support and bind via PublisherGrpc.bindService(AsyncService).
  public interface AsyncService {

    /**
     *
     *
     * <pre>
     * Publish events to a ChannelConnection in a partner's project.
     * </pre>
     */
    default void publishChannelConnectionEvents(
        com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsRequest request,
        io.grpc.stub.StreamObserver<
                com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getPublishChannelConnectionEventsMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a subscriber's channel.
     * </pre>
     */
    default void publishEvents(
        com.google.cloud.eventarc.publishing.v1.PublishEventsRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.eventarc.publishing.v1.PublishEventsResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getPublishEventsMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a message bus.
     * </pre>
     */
    default void publish(
        com.google.cloud.eventarc.publishing.v1.PublishRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.eventarc.publishing.v1.PublishResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getPublishMethod(), responseObserver);
    }
  }
/**
* Base class for the server implementation of the service Publisher.
*
* <pre>
* Eventarc processes events generated by an event provider and delivers them to
* a subscriber.
* An event provider is a software-as-a-service (SaaS) system or
* product that can generate and deliver events through Eventarc.
* A third-party event provider is an event provider from outside of Google.
* A partner is a third-party event provider that is integrated with Eventarc.
* A subscriber is a Google Cloud customer interested in receiving events.
* Channel is a first-class Eventarc resource that is created and managed
* by the subscriber in their Google Cloud project. A Channel represents a
* subscriber's intent to receive events from an event provider. A Channel is
* associated with exactly one event provider.
* ChannelConnection is a first-class Eventarc resource that
* is created and managed by the partner in their Google Cloud project. A
* ChannelConnection represents a connection between a partner and a
* subscriber's Channel. A ChannelConnection has a one-to-one mapping with a
* Channel.
* Bus is a first-class Eventarc resource that is created and managed in a
* Google Cloud project. A Bus provides a discoverable endpoint for events and
* is a router that receives all events published by event providers and
* delivers them to zero or more subscribers.
* Publisher allows an event provider to publish events to Eventarc.
* </pre>
*/
  // Legacy-style server base class: inherits the AsyncService defaults and makes the
  // implementation bindable to a gRPC server.
  public abstract static class PublisherImplBase implements io.grpc.BindableService, AsyncService {

    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return PublisherGrpc.bindService(this);
    }
  }
/**
* A stub to allow clients to do asynchronous rpc calls to service Publisher.
*
* <pre>
* Eventarc processes events generated by an event provider and delivers them to
* a subscriber.
* An event provider is a software-as-a-service (SaaS) system or
* product that can generate and deliver events through Eventarc.
* A third-party event provider is an event provider from outside of Google.
* A partner is a third-party event provider that is integrated with Eventarc.
* A subscriber is a Google Cloud customer interested in receiving events.
* Channel is a first-class Eventarc resource that is created and managed
* by the subscriber in their Google Cloud project. A Channel represents a
* subscriber's intent to receive events from an event provider. A Channel is
* associated with exactly one event provider.
* ChannelConnection is a first-class Eventarc resource that
* is created and managed by the partner in their Google Cloud project. A
* ChannelConnection represents a connection between a partner and a
* subscriber's Channel. A ChannelConnection has a one-to-one mapping with a
* Channel.
* Bus is a first-class Eventarc resource that is created and managed in a
* Google Cloud project. A Bus provides a discoverable endpoint for events and
* is a router that receives all events published by event providers and
* delivers them to zero or more subscribers.
* Publisher allows an event provider to publish events to Eventarc.
* </pre>
*/
  // Async client stub: each call returns immediately and delivers the result to the
  // supplied StreamObserver.
  public static final class PublisherStub extends io.grpc.stub.AbstractAsyncStub<PublisherStub> {
    private PublisherStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected PublisherStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new PublisherStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a ChannelConnection in a partner's project.
     * </pre>
     */
    public void publishChannelConnectionEvents(
        com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsRequest request,
        io.grpc.stub.StreamObserver<
                com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getPublishChannelConnectionEventsMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a subscriber's channel.
     * </pre>
     */
    public void publishEvents(
        com.google.cloud.eventarc.publishing.v1.PublishEventsRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.eventarc.publishing.v1.PublishEventsResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getPublishEventsMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a message bus.
     * </pre>
     */
    public void publish(
        com.google.cloud.eventarc.publishing.v1.PublishRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.eventarc.publishing.v1.PublishResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getPublishMethod(), getCallOptions()), request, responseObserver);
    }
  }
/**
* A stub to allow clients to do synchronous rpc calls to service Publisher.
*
* <pre>
* Eventarc processes events generated by an event provider and delivers them to
* a subscriber.
* An event provider is a software-as-a-service (SaaS) system or
* product that can generate and deliver events through Eventarc.
* A third-party event provider is an event provider from outside of Google.
* A partner is a third-party event provider that is integrated with Eventarc.
* A subscriber is a Google Cloud customer interested in receiving events.
* Channel is a first-class Eventarc resource that is created and managed
* by the subscriber in their Google Cloud project. A Channel represents a
* subscriber's intent to receive events from an event provider. A Channel is
* associated with exactly one event provider.
* ChannelConnection is a first-class Eventarc resource that
* is created and managed by the partner in their Google Cloud project. A
* ChannelConnection represents a connection between a partner and a
* subscriber's Channel. A ChannelConnection has a one-to-one mapping with a
* Channel.
* Bus is a first-class Eventarc resource that is created and managed in a
* Google Cloud project. A Bus provides a discoverable endpoint for events and
* is a router that receives all events published by event providers and
* delivers them to zero or more subscribers.
* Publisher allows an event provider to publish events to Eventarc.
* </pre>
*/
  // Blocking V2 client stub: synchronous calls for all method shapes of the service.
  public static final class PublisherBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<PublisherBlockingV2Stub> {
    private PublisherBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected PublisherBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new PublisherBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a ChannelConnection in a partner's project.
     * </pre>
     */
    public com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsResponse
        publishChannelConnectionEvents(
            com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getPublishChannelConnectionEventsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a subscriber's channel.
     * </pre>
     */
    public com.google.cloud.eventarc.publishing.v1.PublishEventsResponse publishEvents(
        com.google.cloud.eventarc.publishing.v1.PublishEventsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getPublishEventsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a message bus.
     * </pre>
     */
    public com.google.cloud.eventarc.publishing.v1.PublishResponse publish(
        com.google.cloud.eventarc.publishing.v1.PublishRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getPublishMethod(), getCallOptions(), request);
    }
  }
/**
* A stub to allow clients to do limited synchronous rpc calls to service Publisher.
*
* <pre>
* Eventarc processes events generated by an event provider and delivers them to
* a subscriber.
* An event provider is a software-as-a-service (SaaS) system or
* product that can generate and deliver events through Eventarc.
* A third-party event provider is an event provider from outside of Google.
* A partner is a third-party event provider that is integrated with Eventarc.
* A subscriber is a Google Cloud customer interested in receiving events.
* Channel is a first-class Eventarc resource that is created and managed
* by the subscriber in their Google Cloud project. A Channel represents a
* subscriber's intent to receive events from an event provider. A Channel is
* associated with exactly one event provider.
* ChannelConnection is a first-class Eventarc resource that
* is created and managed by the partner in their Google Cloud project. A
* ChannelConnection represents a connection between a partner and a
* subscriber's Channel. A ChannelConnection has a one-to-one mapping with a
* Channel.
* Bus is a first-class Eventarc resource that is created and managed in a
* Google Cloud project. A Bus provides a discoverable endpoint for events and
* is a router that receives all events published by event providers and
* delivers them to zero or more subscribers.
* Publisher allows an event provider to publish events to Eventarc.
* </pre>
*/
  // Classic blocking client stub: synchronous unary (and server-streaming) calls only.
  public static final class PublisherBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<PublisherBlockingStub> {
    private PublisherBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected PublisherBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new PublisherBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a ChannelConnection in a partner's project.
     * </pre>
     */
    public com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsResponse
        publishChannelConnectionEvents(
            com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getPublishChannelConnectionEventsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a subscriber's channel.
     * </pre>
     */
    public com.google.cloud.eventarc.publishing.v1.PublishEventsResponse publishEvents(
        com.google.cloud.eventarc.publishing.v1.PublishEventsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getPublishEventsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a message bus.
     * </pre>
     */
    public com.google.cloud.eventarc.publishing.v1.PublishResponse publish(
        com.google.cloud.eventarc.publishing.v1.PublishRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getPublishMethod(), getCallOptions(), request);
    }
  }
/**
* A stub to allow clients to do ListenableFuture-style rpc calls to service Publisher.
*
* <pre>
* Eventarc processes events generated by an event provider and delivers them to
* a subscriber.
* An event provider is a software-as-a-service (SaaS) system or
* product that can generate and deliver events through Eventarc.
* A third-party event provider is an event provider from outside of Google.
* A partner is a third-party event provider that is integrated with Eventarc.
* A subscriber is a Google Cloud customer interested in receiving events.
* Channel is a first-class Eventarc resource that is created and managed
* by the subscriber in their Google Cloud project. A Channel represents a
* subscriber's intent to receive events from an event provider. A Channel is
* associated with exactly one event provider.
* ChannelConnection is a first-class Eventarc resource that
* is created and managed by the partner in their Google Cloud project. A
* ChannelConnection represents a connection between a partner and a
* subscriber's Channel. A ChannelConnection has a one-to-one mapping with a
* Channel.
* Bus is a first-class Eventarc resource that is created and managed in a
* Google Cloud project. A Bus provides a discoverable endpoint for events and
* is a router that receives all events published by event providers and
* delivers them to zero or more subscribers.
* Publisher allows an event provider to publish events to Eventarc.
* </pre>
*/
  // Future-style client stub: each unary call returns a ListenableFuture for the response.
  public static final class PublisherFutureStub
      extends io.grpc.stub.AbstractFutureStub<PublisherFutureStub> {
    private PublisherFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected PublisherFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new PublisherFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a ChannelConnection in a partner's project.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsResponse>
        publishChannelConnectionEvents(
            com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getPublishChannelConnectionEventsMethod(), getCallOptions()),
          request);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a subscriber's channel.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.eventarc.publishing.v1.PublishEventsResponse>
        publishEvents(com.google.cloud.eventarc.publishing.v1.PublishEventsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getPublishEventsMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Publish events to a message bus.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.eventarc.publishing.v1.PublishResponse>
        publish(com.google.cloud.eventarc.publishing.v1.PublishRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getPublishMethod(), getCallOptions()), request);
    }
  }
  // Integer dispatch IDs used by MethodHandlers to route an incoming call to the
  // matching AsyncService method.
  private static final int METHODID_PUBLISH_CHANNEL_CONNECTION_EVENTS = 0;
  private static final int METHODID_PUBLISH_EVENTS = 1;
  private static final int METHODID_PUBLISH = 2;

  /**
   * Routes incoming server calls to the {@link AsyncService} implementation.
   *
   * <p>One generic handler class implements all four gRPC call shapes; the {@code methodId}
   * fixed at construction time selects the service method. Every method of this service is
   * unary, so the streaming {@code invoke} overload always throws.
   */
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    // Unary dispatch. The unchecked casts are safe because bindService() only ever pairs
    // each METHODID_* constant with the matching request/response types.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_PUBLISH_CHANNEL_CONNECTION_EVENTS:
          serviceImpl.publishChannelConnectionEvents(
              (com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsRequest)
                  request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.eventarc.publishing.v1
                          .PublishChannelConnectionEventsResponse>)
                  responseObserver);
          break;
        case METHODID_PUBLISH_EVENTS:
          serviceImpl.publishEvents(
              (com.google.cloud.eventarc.publishing.v1.PublishEventsRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.eventarc.publishing.v1.PublishEventsResponse>)
                  responseObserver);
          break;
        case METHODID_PUBLISH:
          serviceImpl.publish(
              (com.google.cloud.eventarc.publishing.v1.PublishRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.eventarc.publishing.v1.PublishResponse>)
                  responseObserver);
          break;
        default:
          // Unknown id indicates a generator bug, not a runtime condition.
          throw new AssertionError();
      }
    }

    // Streaming dispatch: unreachable for this all-unary service.
    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }
/**
 * Builds the server-side service definition for {@code Publisher}, registering each
 * of the three unary RPCs with a {@link MethodHandlers} bound to its method id.
 *
 * @param service the async service implementation to dispatch to
 * @return the complete service definition to register with a gRPC server
 */
public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
  return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
      .addMethod(
          getPublishChannelConnectionEventsMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                  com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsRequest,
                  com.google.cloud.eventarc.publishing.v1.PublishChannelConnectionEventsResponse>(
                  service, METHODID_PUBLISH_CHANNEL_CONNECTION_EVENTS)))
      .addMethod(
          getPublishEventsMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                  com.google.cloud.eventarc.publishing.v1.PublishEventsRequest,
                  com.google.cloud.eventarc.publishing.v1.PublishEventsResponse>(
                  service, METHODID_PUBLISH_EVENTS)))
      .addMethod(
          getPublishMethod(),
          io.grpc.stub.ServerCalls.asyncUnaryCall(
              new MethodHandlers<
                  com.google.cloud.eventarc.publishing.v1.PublishRequest,
                  com.google.cloud.eventarc.publishing.v1.PublishResponse>(
                  service, METHODID_PUBLISH)))
      .build();
}
/**
 * Base supplier exposing the proto file descriptor and the "Publisher" service
 * descriptor for reflection/debugging support.
 */
private abstract static class PublisherBaseDescriptorSupplier
    implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
        io.grpc.protobuf.ProtoServiceDescriptorSupplier {
  PublisherBaseDescriptorSupplier() {}

  @java.lang.Override
  public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
    return com.google.cloud.eventarc.publishing.v1.PublisherProto.getDescriptor();
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
    // Looked up by name from the file descriptor above.
    return getFileDescriptor().findServiceByName("Publisher");
  }
}
/** File-level descriptor supplier; adds nothing beyond the base class. */
private static final class PublisherFileDescriptorSupplier
    extends PublisherBaseDescriptorSupplier {
  PublisherFileDescriptorSupplier() {}
}
/**
 * Method-level descriptor supplier: resolves a single method's proto descriptor
 * by name from the service descriptor.
 */
private static final class PublisherMethodDescriptorSupplier
    extends PublisherBaseDescriptorSupplier
    implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
  // Proto method name (e.g. "Publish") to resolve lazily.
  private final java.lang.String methodName;

  PublisherMethodDescriptorSupplier(java.lang.String methodName) {
    this.methodName = methodName;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
    return getServiceDescriptor().findMethodByName(methodName);
  }
}
// Lazily-initialized service descriptor; volatile for the double-checked locking below.
private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

/**
 * Returns the {@link io.grpc.ServiceDescriptor} for this service, building it on first
 * use. Uses double-checked locking: the volatile read outside the lock makes the common
 * (already-initialized) path lock-free, and the re-check inside the synchronized block
 * guarantees single initialization.
 */
public static io.grpc.ServiceDescriptor getServiceDescriptor() {
  io.grpc.ServiceDescriptor result = serviceDescriptor;
  if (result == null) {
    synchronized (PublisherGrpc.class) {
      result = serviceDescriptor;
      if (result == null) {
        serviceDescriptor =
            result =
                io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                    .setSchemaDescriptor(new PublisherFileDescriptorSupplier())
                    .addMethod(getPublishChannelConnectionEventsMethod())
                    .addMethod(getPublishEventsMethod())
                    .addMethod(getPublishMethod())
                    .build();
      }
    }
  }
  return result;
}
}
|
hibernate/hibernate-search | 33,447 | integrationtest/mapper/pojo-base/src/test/java/org/hibernate/search/integrationtest/mapper/pojo/mapping/definition/DependencyIT.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.search.integrationtest.mapper.pojo.mapping.definition;
import static org.assertj.core.api.Assertions.assertThatCode;
import static org.assertj.core.api.Assertions.assertThatThrownBy;
import java.lang.invoke.MethodHandles;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.AssociationInverseSide;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.DocumentId;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.GenericField;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.Indexed;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexedEmbedded;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.IndexingDependency;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.ObjectPath;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.PropertyValue;
import org.hibernate.search.mapper.pojo.mapping.definition.annotation.SearchEntity;
import org.hibernate.search.mapper.pojo.standalone.mapping.SearchMapping;
import org.hibernate.search.mapper.pojo.standalone.session.SearchSession;
import org.hibernate.search.util.common.SearchException;
import org.hibernate.search.util.impl.integrationtest.common.extension.BackendMock;
import org.hibernate.search.util.impl.integrationtest.common.reporting.FailureReportUtils;
import org.hibernate.search.util.impl.integrationtest.mapper.pojo.standalone.StandalonePojoMappingSetupHelper;
import org.hibernate.search.util.impl.test.annotation.TestForIssue;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.RegisterExtension;
/**
 * Tests for indexing-dependency resolution in the POJO mapper:
 * {@code @IndexingDependency(derivedFrom)}, {@code @AssociationInverseSide}, and the
 * error reports produced when dependencies cannot be resolved (missing/invalid paths,
 * derivation cycles, non-invertible associations).
 *
 * <p>Uses a mocked backend: each test declares the exact schema and works it expects,
 * then verifies them. Expectation strings/ordering are part of the asserted contract.
 */
@SuppressWarnings("unused")
class DependencyIT {

    @RegisterExtension
    public BackendMock backendMock = BackendMock.create();

    @RegisterExtension
    public StandalonePojoMappingSetupHelper setupHelper =
            StandalonePojoMappingSetupHelper.withBackendMock( MethodHandles.lookup(), backendMock );

    /** An empty @AssociationInverseSide path must be reported as a bootstrap failure. */
    @Test
    void associationInverseSide_error_missingInversePath() {
        @Indexed
        class IndexedEntity {
            @DocumentId
            Integer id;
            @AssociationInverseSide(inversePath = @ObjectPath({ }))
            public IndexedEntity getOther() {
                throw new UnsupportedOperationException( "Should not be called" );
            }
        }
        assertThatThrownBy(
                () -> setupHelper.start().setup( IndexedEntity.class )
        )
                .isInstanceOf( SearchException.class )
                .satisfies( FailureReportUtils.hasFailureReport()
                        .typeContext( IndexedEntity.class.getName() )
                        .pathContext( ".other" )
                        .annotationContextAnyParameters( AssociationInverseSide.class )
                        .failure(
                                "@AssociationInverseSide.inversePath is empty"
                        )
                );
    }

    /**
     * A property derived from two sources is reindexed when either source changes,
     * and NOT reindexed when an unrelated property changes.
     */
    @Test
    void derivedFrom() {
        final String indexName = "index1";
        @Indexed(index = indexName)
        class IndexedEntity {
            @DocumentId
            Integer id;
            String source1;
            String source2;
            String unused;
            @GenericField
            @IndexingDependency(derivedFrom = {
                    @ObjectPath(@PropertyValue(propertyName = "source1")),
                    @ObjectPath(@PropertyValue(propertyName = "source2")) })
            public String getDerived() {
                return source1 + " " + source2;
            }
        }
        backendMock.expectSchema( indexName, b -> b
                .field( "derived", String.class )
        );
        SearchMapping mapping = setupHelper.start().expectCustomBeans().setup( IndexedEntity.class );
        backendMock.verifyExpectationsMet();
        IndexedEntity entity = new IndexedEntity();
        entity.id = 1;
        entity.source1 = "init1";
        entity.source2 = "init2";
        entity.unused = "init3";
        try ( SearchSession session = mapping.createSession() ) {
            session.indexingPlan().add( entity );
            backendMock.expectWorks( indexName )
                    .add( "1", b -> b.field( "derived", "init1 init2" ) );
        }
        backendMock.verifyExpectationsMet();
        // Changes to unused properties are ignored
        try ( SearchSession session = mapping.createSession() ) {
            entity.unused = "updated3";
            session.indexingPlan().addOrUpdate( entity, "unused" );
            // Expect no reindexing at all
        }
        backendMock.verifyExpectationsMet();
        // Changes to source properties trigger reindexing
        try ( SearchSession session = mapping.createSession() ) {
            entity.source1 = "updated1";
            session.indexingPlan().addOrUpdate( entity, "source1" );
            backendMock.expectWorks( indexName )
                    .addOrUpdate( "1", b -> b.field( "derived", "updated1 init2" ) );
        }
        backendMock.verifyExpectationsMet();
    }

    /**
     * derivedFrom with root-level polymorphism: each subtype declares its own sources,
     * so reindexing is triggered per-type (common sources affect both, specific
     * sources only the declaring type).
     */
    @Test
    @TestForIssue(jiraKey = "HSEARCH-4148")
    void derivedFrom_polymorphism() {
        final String index1Name = "index1Name";
        final String index2Name = "index2Name";
        @Indexed
        abstract class AbstractIndexedEntity {
            @DocumentId
            Integer id;
            @GenericField
            public abstract String getDerived();
        }
        @Indexed(index = index1Name)
        class IndexedEntity1 extends AbstractIndexedEntity {
            String source1;
            String source2;
            String source3;
            String source4;
            @Override
            @IndexingDependency(derivedFrom = {
                    @ObjectPath(@PropertyValue(propertyName = "source1")),
                    @ObjectPath(@PropertyValue(propertyName = "source2")),
                    @ObjectPath(@PropertyValue(propertyName = "source4")) })
            public String getDerived() {
                return source1 + " " + source2 + " " + source4;
            }
        }
        @Indexed(index = index2Name)
        class IndexedEntity2 extends AbstractIndexedEntity {
            String source1;
            String source2;
            String source3;
            String source5;
            @Override
            @IndexingDependency(derivedFrom = {
                    @ObjectPath(@PropertyValue(propertyName = "source1")),
                    @ObjectPath(@PropertyValue(propertyName = "source3")),
                    @ObjectPath(@PropertyValue(propertyName = "source5")) })
            public String getDerived() {
                return source1 + " " + source3 + " " + source5;
            }
        }
        backendMock.expectSchema( index1Name, b -> b
                .field( "derived", String.class )
        );
        backendMock.expectSchema( index2Name, b -> b
                .field( "derived", String.class )
        );
        SearchMapping mapping = setupHelper.start().expectCustomBeans()
                .setup( AbstractIndexedEntity.class, IndexedEntity1.class, IndexedEntity2.class );
        backendMock.verifyExpectationsMet();
        IndexedEntity1 entity1 = new IndexedEntity1();
        entity1.id = 1;
        entity1.source1 = "init1";
        entity1.source2 = "init2";
        entity1.source3 = "init3";
        entity1.source4 = "init4";
        IndexedEntity2 entity2 = new IndexedEntity2();
        entity2.id = 2;
        entity2.source1 = "init1";
        entity2.source2 = "init2";
        entity2.source3 = "init3";
        entity2.source5 = "init5";
        try ( SearchSession session = mapping.createSession() ) {
            session.indexingPlan().add( entity1 );
            session.indexingPlan().add( entity2 );
            backendMock.expectWorks( index1Name )
                    .add( "1", b -> b.field( "derived", "init1 init2 init4" ) );
            backendMock.expectWorks( index2Name )
                    .add( "2", b -> b.field( "derived", "init1 init3 init5" ) );
        }
        backendMock.verifyExpectationsMet();
        // Changes to unused properties are ignored
        try ( SearchSession session = mapping.createSession() ) {
            entity1.source3 = "updated3";
            entity2.source2 = "updated2";
            session.indexingPlan().addOrUpdate( entity1, "source3" );
            session.indexingPlan().addOrUpdate( entity2, "source2" );
            // Expect no reindexing at all
        }
        backendMock.verifyExpectationsMet();
        // Changes to common source properties trigger reindexing
        try ( SearchSession session = mapping.createSession() ) {
            entity1.source1 = "updated1";
            entity2.source1 = "updated1";
            session.indexingPlan().addOrUpdate( entity1, "source1" );
            session.indexingPlan().addOrUpdate( entity2, "source1" );
            backendMock.expectWorks( index1Name )
                    .addOrUpdate( "1", b -> b.field( "derived", "updated1 init2 init4" ) );
            backendMock.expectWorks( index2Name )
                    .addOrUpdate( "2", b -> b.field( "derived", "updated1 init3 init5" ) );
        }
        backendMock.verifyExpectationsMet();
        // Changes to specific properties that exist in both types, but are source in only one type
        // trigger reindexing for the relevant types.
        try ( SearchSession session = mapping.createSession() ) {
            entity1.source2 = "updated2";
            entity2.source3 = "updated3";
            session.indexingPlan().addOrUpdate( entity1, "source2" );
            session.indexingPlan().addOrUpdate( entity2, "source3" );
            backendMock.expectWorks( index1Name )
                    .addOrUpdate( "1", b -> b.field( "derived", "updated1 updated2 init4" ) );
            backendMock.expectWorks( index2Name )
                    .addOrUpdate( "2", b -> b.field( "derived", "updated1 updated3 init5" ) );
        }
        backendMock.verifyExpectationsMet();
        // Changes to specific properties that exist in only one type and are source in only one type
        // trigger reindexing for the relevant types.
        try ( SearchSession session = mapping.createSession() ) {
            entity1.source4 = "updated4";
            entity2.source5 = "updated5";
            session.indexingPlan().addOrUpdate( entity1, "source4" );
            session.indexingPlan().addOrUpdate( entity2, "source5" );
            backendMock.expectWorks( index1Name )
                    .addOrUpdate( "1", b -> b.field( "derived", "updated1 updated2 updated4" ) );
            backendMock.expectWorks( index2Name )
                    .addOrUpdate( "2", b -> b.field( "derived", "updated1 updated3 updated5" ) );
        }
        backendMock.verifyExpectationsMet();
    }

    /**
     * Test a polymorphic derivedFrom that is not at the root, e.g. on a property in an @IndexedEmbedded.
     * <p>
     * This is sensibly different from a polymorphic derivedFrom at the root,
     * since we need to handle polymorphism in a single reindexing resolver,
     * instead of having a separate reindexing resolver per type.
     */
    @Test
    @TestForIssue(jiraKey = "HSEARCH-4148")
    void derivedFrom_nonRoot_polymorphism() {
        final String indexName = "indexName";
        class Model {
            @Indexed(index = indexName)
            class IndexedEntity {
                @DocumentId
                Integer id;
                @IndexedEmbedded
                AbstractContainedEntity contained;
            }
            abstract class AbstractContainedEntity {
                @AssociationInverseSide(inversePath = @ObjectPath(@PropertyValue(propertyName = "contained")))
                IndexedEntity containing;
                @GenericField
                public abstract String getDerived();
            }
            class ContainedEntity1 extends AbstractContainedEntity {
                String source1;
                String source2;
                String source3;
                String source4;
                @Override
                @IndexingDependency(derivedFrom = {
                        @ObjectPath(@PropertyValue(propertyName = "source1")),
                        @ObjectPath(@PropertyValue(propertyName = "source2")),
                        @ObjectPath(@PropertyValue(propertyName = "source4"))
                })
                public String getDerived() {
                    return source1 + " " + source2 + " " + source4;
                }
            }
            class ContainedEntity2 extends AbstractContainedEntity {
                String source1;
                String source2;
                String source3;
                String source5;
                @Override
                @IndexingDependency(derivedFrom = {
                        @ObjectPath(@PropertyValue(propertyName = "source1")),
                        @ObjectPath(@PropertyValue(propertyName = "source3")),
                        @ObjectPath(@PropertyValue(propertyName = "source5"))
                })
                public String getDerived() {
                    return source1 + " " + source3 + " " + source5;
                }
            }
        }
        backendMock.expectSchema( indexName, b -> b
                .objectField( "contained", b2 -> b2
                        .field( "derived", String.class )
                )
        );
        SearchMapping mapping = setupHelper.start().expectCustomBeans()
                .setup( Model.IndexedEntity.class, Model.AbstractContainedEntity.class,
                        Model.ContainedEntity1.class, Model.ContainedEntity2.class );
        backendMock.verifyExpectationsMet();
        Model model = new Model();
        Model.IndexedEntity indexed1 = model.new IndexedEntity();
        indexed1.id = 1;
        Model.ContainedEntity1 contained1 = model.new ContainedEntity1();
        indexed1.contained = contained1;
        contained1.containing = indexed1;
        contained1.source1 = "init1";
        contained1.source2 = "init2";
        contained1.source3 = "init3";
        contained1.source4 = "init4";
        Model.IndexedEntity indexed2 = model.new IndexedEntity();
        indexed2.id = 2;
        Model.ContainedEntity2 contained2 = model.new ContainedEntity2();
        indexed2.contained = contained2;
        contained2.containing = indexed2;
        contained2.source1 = "init1";
        contained2.source2 = "init2";
        contained2.source3 = "init3";
        contained2.source5 = "init5";
        try ( SearchSession session = mapping.createSession() ) {
            session.indexingPlan().add( indexed1 );
            session.indexingPlan().add( indexed2 );
            backendMock.expectWorks( indexName )
                    .add( "1", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "init1 init2 init4" ) ) )
                    .add( "2", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "init1 init3 init5" ) ) );
        }
        backendMock.verifyExpectationsMet();
        // Changes to unused properties are ignored
        try ( SearchSession session = mapping.createSession() ) {
            contained1.source3 = "updated3";
            contained2.source2 = "updated2";
            session.indexingPlan().addOrUpdate( 1, null, contained1, "source3" );
            session.indexingPlan().addOrUpdate( 2, null, contained2, "source2" );
            // Expect no reindexing at all
        }
        backendMock.verifyExpectationsMet();
        // Changes to common source properties trigger reindexing
        try ( SearchSession session = mapping.createSession() ) {
            contained1.source1 = "updated1";
            contained2.source1 = "updated1";
            session.indexingPlan().addOrUpdate( 1, null, contained1, "source1" );
            session.indexingPlan().addOrUpdate( 2, null, contained2, "source1" );
            backendMock.expectWorks( indexName )
                    .addOrUpdate( "1", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "updated1 init2 init4" ) ) )
                    .addOrUpdate( "2", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "updated1 init3 init5" ) ) );
        }
        backendMock.verifyExpectationsMet();
        // Changes to specific properties that exist in both types, but are source in only one type
        // trigger reindexing for the relevant types.
        try ( SearchSession session = mapping.createSession() ) {
            contained1.source2 = "updated2";
            contained2.source3 = "updated3";
            session.indexingPlan().addOrUpdate( 1, null, contained1, "source2" );
            session.indexingPlan().addOrUpdate( 2, null, contained2, "source3" );
            backendMock.expectWorks( indexName )
                    .addOrUpdate( "1", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "updated1 updated2 init4" ) ) )
                    .addOrUpdate( "2", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "updated1 updated3 init5" ) ) );
        }
        backendMock.verifyExpectationsMet();
        // Changes to specific properties that exist in only one type and are source in only one type
        // trigger reindexing for the relevant types.
        try ( SearchSession session = mapping.createSession() ) {
            contained1.source4 = "updated4";
            contained2.source5 = "updated5";
            session.indexingPlan().addOrUpdate( 1, null, contained1, "source4" );
            session.indexingPlan().addOrUpdate( 2, null, contained2, "source5" );
            backendMock.expectWorks( indexName )
                    .addOrUpdate( "1", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "updated1 updated2 updated4" ) ) )
                    .addOrUpdate( "2", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "updated1 updated3 updated5" ) ) );
        }
        backendMock.verifyExpectationsMet();
    }

    /**
     * Test a polymorphic derivedFrom that is not at the root, e.g. on a property in an @IndexedEmbedded,
     * and that involves generics (which should thus be preserved).
     * <p>
     * This test is useful mainly for non-regression, because the handling of polymorphism involves casts,
     * and if implemented incorrectly those cases could result in type erasure that would make the whole process fail.
     *
     * @see #derivedFrom_nonRoot_polymorphism()
     */
    // NOTE(review): "prevervesGenerics" is a typo for "preservesGenerics"; renaming would
    // change the reported test name, so it is left as-is here.
    @Test
    @TestForIssue(jiraKey = "HSEARCH-4148")
    void derivedFrom_nonRoot_polymorphism_prevervesGenerics() {
        final String indexName = "indexName";
        class Model {
            @Indexed(index = indexName)
            class IndexedEntity {
                @DocumentId
                Integer id;
                @IndexedEmbedded
                AbstractContainedEntity<Model.OtherContainedEntity> contained;
            }
            abstract class AbstractContainedEntity<T> {
                @AssociationInverseSide(inversePath = @ObjectPath(@PropertyValue(propertyName = "contained")))
                IndexedEntity containing;
                T contained;
                @GenericField
                public abstract String getDerived();
            }
            class ContainedEntity1<T extends OtherContainedSuperClass> extends AbstractContainedEntity<T> {
                @Override
                @IndexingDependency(derivedFrom = {
                        @ObjectPath({ @PropertyValue(propertyName = "contained"), @PropertyValue(propertyName = "source1") }),
                        @ObjectPath({ @PropertyValue(propertyName = "contained"), @PropertyValue(propertyName = "source2") })
                })
                public String getDerived() {
                    return contained.source1 + " " + contained.source2;
                }
            }
            class ContainedEntity2<T extends OtherContainedSuperClass> extends AbstractContainedEntity<T> {
                @Override
                @IndexingDependency(derivedFrom = {
                        @ObjectPath({ @PropertyValue(propertyName = "contained"), @PropertyValue(propertyName = "source1") }),
                        @ObjectPath({ @PropertyValue(propertyName = "contained"), @PropertyValue(propertyName = "source3") })
                })
                public String getDerived() {
                    return contained.source1 + " " + contained.source3;
                }
            }
            class OtherContainedSuperClass {
                String source1;
                String source2;
                String source3;
            }
            class OtherContainedEntity extends OtherContainedSuperClass {
                @AssociationInverseSide(inversePath = @ObjectPath(@PropertyValue(propertyName = "contained")))
                AbstractContainedEntity<OtherContainedEntity> containing;
            }
        }
        backendMock.expectSchema( indexName, b -> b
                .objectField( "contained", b2 -> b2
                        .field( "derived", String.class )
                )
        );
        SearchMapping mapping = setupHelper.start().expectCustomBeans()
                .setup( Model.IndexedEntity.class, Model.AbstractContainedEntity.class,
                        Model.ContainedEntity1.class, Model.ContainedEntity2.class, Model.OtherContainedEntity.class );
        backendMock.verifyExpectationsMet();
        Model model = new Model();
        Model.IndexedEntity indexed1 = model.new IndexedEntity();
        indexed1.id = 1;
        Model.ContainedEntity1<Model.OtherContainedEntity> contained1 = model.new ContainedEntity1<>();
        indexed1.contained = contained1;
        contained1.containing = indexed1;
        Model.OtherContainedEntity otherContained1 = model.new OtherContainedEntity();
        contained1.contained = otherContained1;
        otherContained1.containing = contained1;
        otherContained1.source1 = "init1";
        otherContained1.source2 = "init2";
        otherContained1.source3 = "init3";
        Model.IndexedEntity indexed2 = model.new IndexedEntity();
        indexed2.id = 2;
        Model.ContainedEntity2<Model.OtherContainedEntity> contained2 = model.new ContainedEntity2<>();
        indexed2.contained = contained2;
        contained2.containing = indexed2;
        Model.OtherContainedEntity otherContained2 = model.new OtherContainedEntity();
        contained2.contained = otherContained2;
        otherContained2.containing = contained2;
        otherContained2.source1 = "init1";
        otherContained2.source2 = "init2";
        otherContained2.source3 = "init3";
        try ( SearchSession session = mapping.createSession() ) {
            session.indexingPlan().add( indexed1 );
            session.indexingPlan().add( indexed2 );
            backendMock.expectWorks( indexName )
                    .add( "1", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "init1 init2" ) ) )
                    .add( "2", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "init1 init3" ) ) );
        }
        backendMock.verifyExpectationsMet();
        // Changes to unused properties are ignored
        try ( SearchSession session = mapping.createSession() ) {
            otherContained1.source3 = "updated3";
            otherContained2.source2 = "updated2";
            session.indexingPlan().addOrUpdate( 1, null, otherContained1, "source3" );
            session.indexingPlan().addOrUpdate( 2, null, otherContained2, "source2" );
            // Expect no reindexing at all
        }
        backendMock.verifyExpectationsMet();
        // Changes to common source properties trigger reindexing
        try ( SearchSession session = mapping.createSession() ) {
            otherContained1.source1 = "updated1";
            otherContained2.source1 = "updated1";
            session.indexingPlan().addOrUpdate( 1, null, otherContained1, "source1" );
            session.indexingPlan().addOrUpdate( 2, null, otherContained2, "source1" );
            backendMock.expectWorks( indexName )
                    .addOrUpdate( "1", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "updated1 init2" ) ) )
                    .addOrUpdate( "2", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "updated1 init3" ) ) );
        }
        backendMock.verifyExpectationsMet();
        // Changes to specific properties that exist in both types, but are source in only one type
        // trigger reindexing for the relevant types.
        try ( SearchSession session = mapping.createSession() ) {
            otherContained1.source2 = "updated2";
            otherContained2.source3 = "updated3";
            session.indexingPlan().addOrUpdate( 1, null, otherContained1, "source2" );
            session.indexingPlan().addOrUpdate( 2, null, otherContained2, "source3" );
            backendMock.expectWorks( indexName )
                    .addOrUpdate( "1", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "updated1 updated2" ) ) )
                    .addOrUpdate( "2", b -> b.objectField( "contained", b2 -> b2
                            .field( "derived", "updated1 updated3" ) ) );
        }
        backendMock.verifyExpectationsMet();
    }

    /** An empty derivedFrom path must be reported as a bootstrap failure. */
    @Test
    void derivedFrom_error_missingPath() {
        @Indexed
        class IndexedEntity {
            @DocumentId
            Integer id;
            @GenericField
            @IndexingDependency(derivedFrom = @ObjectPath({ }))
            public String getDerived() {
                throw new UnsupportedOperationException( "Should not be called" );
            }
        }
        assertThatThrownBy(
                () -> setupHelper.start().setup( IndexedEntity.class )
        )
                .isInstanceOf( SearchException.class )
                .satisfies( FailureReportUtils.hasFailureReport()
                        .typeContext( IndexedEntity.class.getName() )
                        .pathContext( ".derived" )
                        .annotationContextAnyParameters( IndexingDependency.class )
                        .failure(
                                "@IndexingDependency.derivedFrom contains an empty path"
                        )
                );
    }

    /** A derivedFrom path referencing a nonexistent property must be reported. */
    @Test
    void derivedFrom_error_invalidPath() {
        @Indexed
        class IndexedEntity {
            @DocumentId
            Integer id;
            @GenericField
            @IndexingDependency(derivedFrom = @ObjectPath(@PropertyValue(propertyName = "invalidPath")))
            public String getDerived() {
                throw new UnsupportedOperationException( "Should not be called" );
            }
        }
        assertThatThrownBy(
                () -> setupHelper.start().setup( IndexedEntity.class )
        )
                .isInstanceOf( SearchException.class )
                .satisfies( FailureReportUtils.hasFailureReport()
                        .typeContext( IndexedEntity.class.getName() )
                        .pathContext( ".derived<no value extractors>" )
                        .failure( "No readable property named 'invalidPath' on type '"
                                + IndexedEntity.class.getName() + "'" ) );
    }

    /**
     * A derivation cycle (A depends on B depends on C depends on A) must be detected
     * and reported with the full derivation chain.
     */
    @Test
    void derivedFrom_error_cycle() {
        class DerivedFromCycle {
            @SearchEntity
            @Indexed
            class A {
                @DocumentId
                Integer id;
                B b;
                @GenericField
                @IndexingDependency(derivedFrom = @ObjectPath({
                        @PropertyValue(propertyName = "b"),
                        @PropertyValue(propertyName = "derivedB")
                }))
                public String getDerivedA() {
                    throw new UnsupportedOperationException( "Should not be called" );
                }
            }
            class B {
                C c;
                @GenericField
                @IndexingDependency(derivedFrom = @ObjectPath({
                        @PropertyValue(propertyName = "c"),
                        @PropertyValue(propertyName = "derivedC")
                }))
                public String getDerivedB() {
                    throw new UnsupportedOperationException( "Should not be called" );
                }
            }
            class C {
                A a;
                @GenericField
                @IndexingDependency(derivedFrom = @ObjectPath({
                        @PropertyValue(propertyName = "a"),
                        @PropertyValue(propertyName = "derivedA")
                }))
                public String getDerivedC() {
                    throw new UnsupportedOperationException( "Should not be called" );
                }
            }
        }
        assertThatThrownBy(
                () -> setupHelper.start()
                        .withAnnotatedTypes( DerivedFromCycle.A.class )
                        .withAnnotatedTypes( DerivedFromCycle.B.class, DerivedFromCycle.C.class )
                        .setup()
        )
                .isInstanceOf( SearchException.class )
                .satisfies( FailureReportUtils.hasFailureReport()
                        .typeContext( DerivedFromCycle.A.class.getName() )
                        .pathContext( ".derivedA<no value extractors>" )
                        .multilineFailure( "Unable to resolve dependencies of a derived property:"
                                + " there is a cyclic dependency starting from type '" + DerivedFromCycle.A.class.getName()
                                + "'",
                                "Derivation chain starting from that type and ending with a cycle:\n"
                                        + "- " + DerivedFromCycle.A.class.getName()
                                        + "#.b<default value extractors>.derivedB<default value extractors>\n"
                                        + "- " + DerivedFromCycle.B.class.getName()
                                        + "#.c<default value extractors>.derivedC<default value extractors>\n"
                                        + "- " + DerivedFromCycle.C.class.getName()
                                        + "#.a<default value extractors>.derivedA<default value extractors>\n",
                                "A derived property cannot be marked as derived from itself",
                                "you should consider disabling automatic reindexing"
                        ) );
    }

    /**
     * Same cycle as above, but reached through an extra non-cyclic hop (Zero -> A):
     * the cycle must still be detected even when it does not start at the entry type.
     */
    @Test
    @TestForIssue(jiraKey = "HSEARCH-4565")
    void derivedFrom_error_cycle_buried() {
        class DerivedFromCycle {
            @SearchEntity
            @Indexed
            class Zero {
                @DocumentId
                Integer id;
                A a;
                @GenericField
                @IndexingDependency(derivedFrom = @ObjectPath({
                        @PropertyValue(propertyName = "a"),
                        @PropertyValue(propertyName = "derivedA")
                }))
                public String getDerivedZero() {
                    throw new UnsupportedOperationException( "Should not be called" );
                }
            }
            class A {
                B b;
                @GenericField
                @IndexingDependency(derivedFrom = @ObjectPath({
                        @PropertyValue(propertyName = "b"),
                        @PropertyValue(propertyName = "derivedB")
                }))
                public String getDerivedA() {
                    throw new UnsupportedOperationException( "Should not be called" );
                }
            }
            class B {
                C c;
                @GenericField
                @IndexingDependency(derivedFrom = @ObjectPath({
                        @PropertyValue(propertyName = "c"),
                        @PropertyValue(propertyName = "derivedC")
                }))
                public String getDerivedB() {
                    throw new UnsupportedOperationException( "Should not be called" );
                }
            }
            class C {
                A a;
                @GenericField
                @IndexingDependency(derivedFrom = @ObjectPath({
                        @PropertyValue(propertyName = "a"),
                        @PropertyValue(propertyName = "derivedA")
                }))
                public String getDerivedC() {
                    throw new UnsupportedOperationException( "Should not be called" );
                }
            }
        }
        assertThatThrownBy(
                () -> setupHelper.start()
                        .withAnnotatedTypes( DerivedFromCycle.Zero.class )
                        .withAnnotatedTypes( DerivedFromCycle.A.class, DerivedFromCycle.B.class, DerivedFromCycle.C.class )
                        .setup()
        )
                .isInstanceOf( SearchException.class )
                .satisfies( FailureReportUtils.hasFailureReport()
                        .typeContext( DerivedFromCycle.Zero.class.getName() )
                        .pathContext( ".derivedZero<no value extractors>" )
                        .multilineFailure( "Unable to resolve dependencies of a derived property:"
                                + " there is a cyclic dependency starting from type '" + DerivedFromCycle.A.class.getName()
                                + "'",
                                "Derivation chain starting from that type and ending with a cycle:\n"
                                        + "- " + DerivedFromCycle.A.class.getName()
                                        + "#.b<default value extractors>.derivedB<default value extractors>\n"
                                        + "- " + DerivedFromCycle.B.class.getName()
                                        + "#.c<default value extractors>.derivedC<default value extractors>\n"
                                        + "- " + DerivedFromCycle.C.class.getName()
                                        + "#.a<default value extractors>.derivedA<default value extractors>\n",
                                "A derived property cannot be marked as derived from itself",
                                "you should consider disabling automatic reindexing"
                        )
                );
    }

    /**
     * A same-named property on a different type along the path must NOT be mistaken
     * for a cycle: setup is expected to succeed.
     */
    @Test
    @TestForIssue(jiraKey = "HSEARCH-4423")
    void derivedFrom_cycleFalsePositive() {
        final String indexName = "myindex";
        class DerivedFromCycle {
            @SearchEntity
            @Indexed(index = indexName)
            class A {
                @DocumentId
                Integer id;
                B b;
                @GenericField
                @IndexingDependency(derivedFrom = @ObjectPath({
                        @PropertyValue(propertyName = "b"),
                        @PropertyValue(propertyName = "c"),
                        @PropertyValue(propertyName = "derivedA")
                }))
                public String getDerivedA() {
                    throw new UnsupportedOperationException( "Should not be called" );
                }
            }
            class B {
                C c;
            }
            class C {
                A a;
                // Important: this property must have the same name as the property in A
                public String getDerivedA() {
                    throw new UnsupportedOperationException( "Should not be called" );
                }
            }
        }
        backendMock.expectSchema( indexName, b -> b
                .field( "derivedA", String.class )
        );
        assertThatCode(
                () -> setupHelper.start()
                        .withAnnotatedTypes( DerivedFromCycle.A.class )
                        .withAnnotatedTypes( DerivedFromCycle.B.class, DerivedFromCycle.C.class )
                        .setup()
        )
                .doesNotThrowAnyException();
    }

    /**
     * When an @IndexedEmbedded association cannot be inverted, bootstrap must fail
     * with an actionable message (mappedBy / @AssociationInverseSide / SHALLOW tips).
     */
    @Test
    void error_cannotInvertAssociation() {
        class CannotInvertAssociation {
            @Indexed
            class A {
                @DocumentId
                Integer id;
                @IndexedEmbedded
                Embedded embedded;
            }
            class Embedded {
                @IndexedEmbedded
                B b;
            }
            class B {
                A a;
                @GenericField
                String text;
            }
        }
        assertThatThrownBy(
                () -> setupHelper.start().setup(
                        CannotInvertAssociation.A.class, CannotInvertAssociation.B.class
                )
        )
                .isInstanceOf( SearchException.class )
                .satisfies( FailureReportUtils.hasFailureReport()
                        .typeContext( CannotInvertAssociation.A.class.getName() )
                        .pathContext( ".embedded<no value extractors>.b<no value extractors>.text<no value extractors>" )
                        .failure(
                                "Unable to find the inverse side of the association on type '"
                                        + CannotInvertAssociation.A.class.getName() + "'"
                                        + " at path '.embedded<no value extractors>.b<no value extractors>'",
                                " Hibernate Search needs this information in order to reindex '"
                                        + CannotInvertAssociation.A.class.getName() + "' when '"
                                        + CannotInvertAssociation.B.class.getName() + "' is modified.",
                                // Tips
                                "@OneToMany(mappedBy",
                                "@AssociationInverseSide",
                                "if you do not need to reindex '"
                                        + CannotInvertAssociation.A.class.getName() + "' when '"
                                        + CannotInvertAssociation.B.class.getName() + "' is modified",
                                "@IndexingDependency(reindexOnUpdate = ReindexOnUpdate.SHALLOW)"
                        ) );
    }

    /** An inverse-side path naming a nonexistent property must be reported with context. */
    @Test
    void error_cannotApplyInvertAssociationPath_propertyNotFound() {
        class CannotApplyInvertAssociationPath {
            @Indexed
            class A {
                @DocumentId
                Integer id;
                @IndexedEmbedded
                @AssociationInverseSide(inversePath = @ObjectPath(@PropertyValue(propertyName = "invalidPath")))
                B b;
            }
            class B {
                A a;
                @GenericField
                String text;
            }
        }
        assertThatThrownBy(
                () -> setupHelper.start().setup(
                        CannotApplyInvertAssociationPath.A.class, CannotApplyInvertAssociationPath.B.class
                )
        )
                .isInstanceOf( SearchException.class )
                .hasMessageContaining(
                        "Unable to apply path '.invalidPath<default value extractors>'"
                                + " to type '" + CannotApplyInvertAssociationPath.B.class.getName() + "'"
                )
                .hasMessageContaining(
                        "This path was resolved as the inverse side of the association '.b<no value extractors>'"
                                + " on type '" + CannotApplyInvertAssociationPath.A.class.getName() + "'"
                )
                .hasMessageContaining(
                        "Hibernate Search needs to apply this path in order to reindex '"
                                + CannotApplyInvertAssociationPath.A.class.getName() + "' when '"
                                + CannotApplyInvertAssociationPath.B.class.getName() + "' is modified."
                )
                .hasMessageContaining( "No readable property named 'invalidPath' on type '"
                        + CannotApplyInvertAssociationPath.B.class.getName() + "'" );
    }

    /**
     * An inverse-side path whose target type is not the expected association type
     * (here a String instead of A) must be reported with context.
     */
    @Test
    void error_cannotApplyInvertAssociationPath_incorrectTargetTypeForInverseAssociation() {
        class CannotApplyInvertAssociationPath {
            @Indexed
            class A {
                @DocumentId
                Integer id;
                @IndexedEmbedded
                @AssociationInverseSide(inversePath = @ObjectPath(@PropertyValue(propertyName = "a")))
                B b;
            }
            class B {
                String a;
                @GenericField
                String text;
            }
        }
        assertThatThrownBy(
                () -> setupHelper.start().setup(
                        CannotApplyInvertAssociationPath.A.class, CannotApplyInvertAssociationPath.B.class
                )
        )
                .isInstanceOf( SearchException.class )
                .hasMessageContaining(
                        "Unable to apply path '.a<default value extractors>'"
                                + " to type '" + CannotApplyInvertAssociationPath.B.class.getName() + "'"
                )
                .hasMessageContaining(
                        "This path was resolved as the inverse side of the association '.b<no value extractors>'"
                                + " on type '" + CannotApplyInvertAssociationPath.A.class.getName() + "'"
                )
                .hasMessageContaining(
                        "Hibernate Search needs to apply this path in order to reindex '"
                                + CannotApplyInvertAssociationPath.A.class.getName() + "' when '"
                                + CannotApplyInvertAssociationPath.B.class.getName() + "' is modified."
                )
                .hasMessageContaining(
                        "The inverse association targets type '" + String.class.getName()
                                + "', but a supertype or subtype of '" + CannotApplyInvertAssociationPath.A.class.getName()
                                + "' was expected"
                );
    }
}
|
googleapis/google-cloud-java | 36,894 | java-servicedirectory/proto-google-cloud-servicedirectory-v1beta1/src/main/java/com/google/cloud/servicedirectory/v1beta1/ListServicesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/servicedirectory/v1beta1/registration_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.servicedirectory.v1beta1;
/**
*
*
* <pre>
* The response message for
* [RegistrationService.ListServices][google.cloud.servicedirectory.v1beta1.RegistrationService.ListServices].
* </pre>
*
* Protobuf type {@code google.cloud.servicedirectory.v1beta1.ListServicesResponse}
*/
public final class ListServicesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.servicedirectory.v1beta1.ListServicesResponse)
ListServicesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListServicesResponse.newBuilder() to construct.
private ListServicesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListServicesResponse() {
services_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListServicesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.servicedirectory.v1beta1.RegistrationServiceProto
.internal_static_google_cloud_servicedirectory_v1beta1_ListServicesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.servicedirectory.v1beta1.RegistrationServiceProto
.internal_static_google_cloud_servicedirectory_v1beta1_ListServicesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.servicedirectory.v1beta1.ListServicesResponse.class,
com.google.cloud.servicedirectory.v1beta1.ListServicesResponse.Builder.class);
}
public static final int SERVICES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.servicedirectory.v1beta1.Service> services_;
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.servicedirectory.v1beta1.Service> getServicesList() {
return services_;
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.servicedirectory.v1beta1.ServiceOrBuilder>
getServicesOrBuilderList() {
return services_;
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
@java.lang.Override
public int getServicesCount() {
return services_.size();
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
@java.lang.Override
public com.google.cloud.servicedirectory.v1beta1.Service getServices(int index) {
return services_.get(index);
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
@java.lang.Override
public com.google.cloud.servicedirectory.v1beta1.ServiceOrBuilder getServicesOrBuilder(
int index) {
return services_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < services_.size(); i++) {
output.writeMessage(1, services_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < services_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, services_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.servicedirectory.v1beta1.ListServicesResponse)) {
return super.equals(obj);
}
com.google.cloud.servicedirectory.v1beta1.ListServicesResponse other =
(com.google.cloud.servicedirectory.v1beta1.ListServicesResponse) obj;
if (!getServicesList().equals(other.getServicesList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getServicesCount() > 0) {
hash = (37 * hash) + SERVICES_FIELD_NUMBER;
hash = (53 * hash) + getServicesList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.servicedirectory.v1beta1.ListServicesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response message for
* [RegistrationService.ListServices][google.cloud.servicedirectory.v1beta1.RegistrationService.ListServices].
* </pre>
*
* Protobuf type {@code google.cloud.servicedirectory.v1beta1.ListServicesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.servicedirectory.v1beta1.ListServicesResponse)
com.google.cloud.servicedirectory.v1beta1.ListServicesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.servicedirectory.v1beta1.RegistrationServiceProto
.internal_static_google_cloud_servicedirectory_v1beta1_ListServicesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.servicedirectory.v1beta1.RegistrationServiceProto
.internal_static_google_cloud_servicedirectory_v1beta1_ListServicesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.servicedirectory.v1beta1.ListServicesResponse.class,
com.google.cloud.servicedirectory.v1beta1.ListServicesResponse.Builder.class);
}
// Construct using com.google.cloud.servicedirectory.v1beta1.ListServicesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (servicesBuilder_ == null) {
services_ = java.util.Collections.emptyList();
} else {
services_ = null;
servicesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.servicedirectory.v1beta1.RegistrationServiceProto
.internal_static_google_cloud_servicedirectory_v1beta1_ListServicesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.servicedirectory.v1beta1.ListServicesResponse
getDefaultInstanceForType() {
return com.google.cloud.servicedirectory.v1beta1.ListServicesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.servicedirectory.v1beta1.ListServicesResponse build() {
com.google.cloud.servicedirectory.v1beta1.ListServicesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.servicedirectory.v1beta1.ListServicesResponse buildPartial() {
com.google.cloud.servicedirectory.v1beta1.ListServicesResponse result =
new com.google.cloud.servicedirectory.v1beta1.ListServicesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.servicedirectory.v1beta1.ListServicesResponse result) {
if (servicesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
services_ = java.util.Collections.unmodifiableList(services_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.services_ = services_;
} else {
result.services_ = servicesBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.servicedirectory.v1beta1.ListServicesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.servicedirectory.v1beta1.ListServicesResponse) {
return mergeFrom((com.google.cloud.servicedirectory.v1beta1.ListServicesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.servicedirectory.v1beta1.ListServicesResponse other) {
if (other
== com.google.cloud.servicedirectory.v1beta1.ListServicesResponse.getDefaultInstance())
return this;
if (servicesBuilder_ == null) {
if (!other.services_.isEmpty()) {
if (services_.isEmpty()) {
services_ = other.services_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureServicesIsMutable();
services_.addAll(other.services_);
}
onChanged();
}
} else {
if (!other.services_.isEmpty()) {
if (servicesBuilder_.isEmpty()) {
servicesBuilder_.dispose();
servicesBuilder_ = null;
services_ = other.services_;
bitField0_ = (bitField0_ & ~0x00000001);
servicesBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getServicesFieldBuilder()
: null;
} else {
servicesBuilder_.addAllMessages(other.services_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.servicedirectory.v1beta1.Service m =
input.readMessage(
com.google.cloud.servicedirectory.v1beta1.Service.parser(),
extensionRegistry);
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.add(m);
} else {
servicesBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.servicedirectory.v1beta1.Service> services_ =
java.util.Collections.emptyList();
private void ensureServicesIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
services_ =
new java.util.ArrayList<com.google.cloud.servicedirectory.v1beta1.Service>(services_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.servicedirectory.v1beta1.Service,
com.google.cloud.servicedirectory.v1beta1.Service.Builder,
com.google.cloud.servicedirectory.v1beta1.ServiceOrBuilder>
servicesBuilder_;
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public java.util.List<com.google.cloud.servicedirectory.v1beta1.Service> getServicesList() {
if (servicesBuilder_ == null) {
return java.util.Collections.unmodifiableList(services_);
} else {
return servicesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public int getServicesCount() {
if (servicesBuilder_ == null) {
return services_.size();
} else {
return servicesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public com.google.cloud.servicedirectory.v1beta1.Service getServices(int index) {
if (servicesBuilder_ == null) {
return services_.get(index);
} else {
return servicesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public Builder setServices(int index, com.google.cloud.servicedirectory.v1beta1.Service value) {
if (servicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServicesIsMutable();
services_.set(index, value);
onChanged();
} else {
servicesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public Builder setServices(
int index, com.google.cloud.servicedirectory.v1beta1.Service.Builder builderForValue) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.set(index, builderForValue.build());
onChanged();
} else {
servicesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public Builder addServices(com.google.cloud.servicedirectory.v1beta1.Service value) {
if (servicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServicesIsMutable();
services_.add(value);
onChanged();
} else {
servicesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public Builder addServices(int index, com.google.cloud.servicedirectory.v1beta1.Service value) {
if (servicesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureServicesIsMutable();
services_.add(index, value);
onChanged();
} else {
servicesBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public Builder addServices(
com.google.cloud.servicedirectory.v1beta1.Service.Builder builderForValue) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.add(builderForValue.build());
onChanged();
} else {
servicesBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public Builder addServices(
int index, com.google.cloud.servicedirectory.v1beta1.Service.Builder builderForValue) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.add(index, builderForValue.build());
onChanged();
} else {
servicesBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public Builder addAllServices(
java.lang.Iterable<? extends com.google.cloud.servicedirectory.v1beta1.Service> values) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, services_);
onChanged();
} else {
servicesBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public Builder clearServices() {
if (servicesBuilder_ == null) {
services_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
servicesBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public Builder removeServices(int index) {
if (servicesBuilder_ == null) {
ensureServicesIsMutable();
services_.remove(index);
onChanged();
} else {
servicesBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public com.google.cloud.servicedirectory.v1beta1.Service.Builder getServicesBuilder(int index) {
return getServicesFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public com.google.cloud.servicedirectory.v1beta1.ServiceOrBuilder getServicesOrBuilder(
int index) {
if (servicesBuilder_ == null) {
return services_.get(index);
} else {
return servicesBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public java.util.List<? extends com.google.cloud.servicedirectory.v1beta1.ServiceOrBuilder>
getServicesOrBuilderList() {
if (servicesBuilder_ != null) {
return servicesBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(services_);
}
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public com.google.cloud.servicedirectory.v1beta1.Service.Builder addServicesBuilder() {
return getServicesFieldBuilder()
.addBuilder(com.google.cloud.servicedirectory.v1beta1.Service.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public com.google.cloud.servicedirectory.v1beta1.Service.Builder addServicesBuilder(int index) {
return getServicesFieldBuilder()
.addBuilder(
index, com.google.cloud.servicedirectory.v1beta1.Service.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of services.
* </pre>
*
* <code>repeated .google.cloud.servicedirectory.v1beta1.Service services = 1;</code>
*/
public java.util.List<com.google.cloud.servicedirectory.v1beta1.Service.Builder>
getServicesBuilderList() {
return getServicesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.servicedirectory.v1beta1.Service,
com.google.cloud.servicedirectory.v1beta1.Service.Builder,
com.google.cloud.servicedirectory.v1beta1.ServiceOrBuilder>
getServicesFieldBuilder() {
if (servicesBuilder_ == null) {
servicesBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.servicedirectory.v1beta1.Service,
com.google.cloud.servicedirectory.v1beta1.Service.Builder,
com.google.cloud.servicedirectory.v1beta1.ServiceOrBuilder>(
services_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
services_ = null;
}
return servicesBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token to retrieve the next page of results, or empty if there are no
* more results in the list.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.servicedirectory.v1beta1.ListServicesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.servicedirectory.v1beta1.ListServicesResponse)
private static final com.google.cloud.servicedirectory.v1beta1.ListServicesResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.servicedirectory.v1beta1.ListServicesResponse();
}
public static com.google.cloud.servicedirectory.v1beta1.ListServicesResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser for ListServicesResponse messages.
private static final com.google.protobuf.Parser<ListServicesResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListServicesResponse>() {
      @java.lang.Override
      public ListServicesResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed so far so callers can inspect the partial message.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type callers expect.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

/** Returns the parser for this message type. */
public static com.google.protobuf.Parser<ListServicesResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<ListServicesResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.servicedirectory.v1beta1.ListServicesResponse
    getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,109 | java-discoveryengine/grpc-google-cloud-discoveryengine-v1beta/src/main/java/com/google/cloud/discoveryengine/v1beta/SchemaServiceGrpc.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.discoveryengine.v1beta;
import static io.grpc.MethodDescriptor.generateFullMethodName;
/**
*
*
* <pre>
* Service for managing [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
* </pre>
*/
@javax.annotation.Generated(
    value = "by gRPC proto compiler",
    comments = "Source: google/cloud/discoveryengine/v1beta/schema_service.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class SchemaServiceGrpc {

  // Static holder class; not instantiable.
  private SchemaServiceGrpc() {}

  /** Fully qualified service name as declared in the proto file. */
  public static final java.lang.String SERVICE_NAME =
      "google.cloud.discoveryengine.v1beta.SchemaService";

  // Static method descriptors that strictly reflect the proto.
  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1beta.GetSchemaRequest,
          com.google.cloud.discoveryengine.v1beta.Schema>
      getGetSchemaMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetSchema",
      requestType = com.google.cloud.discoveryengine.v1beta.GetSchemaRequest.class,
      responseType = com.google.cloud.discoveryengine.v1beta.Schema.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1beta.GetSchemaRequest,
          com.google.cloud.discoveryengine.v1beta.Schema>
      getGetSchemaMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1beta.GetSchemaRequest,
            com.google.cloud.discoveryengine.v1beta.Schema>
        getGetSchemaMethod;
    // Lazily builds the descriptor using double-checked locking on the class
    // monitor; the backing field is volatile so publication is safe.
    if ((getGetSchemaMethod = SchemaServiceGrpc.getGetSchemaMethod) == null) {
      synchronized (SchemaServiceGrpc.class) {
        if ((getGetSchemaMethod = SchemaServiceGrpc.getGetSchemaMethod) == null) {
          SchemaServiceGrpc.getGetSchemaMethod =
              getGetSchemaMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1beta.GetSchemaRequest,
                          com.google.cloud.discoveryengine.v1beta.Schema>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetSchema"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1beta.GetSchemaRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1beta.Schema.getDefaultInstance()))
                      .setSchemaDescriptor(new SchemaServiceMethodDescriptorSupplier("GetSchema"))
                      .build();
        }
      }
    }
    return getGetSchemaMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1beta.ListSchemasRequest,
          com.google.cloud.discoveryengine.v1beta.ListSchemasResponse>
      getListSchemasMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListSchemas",
      requestType = com.google.cloud.discoveryengine.v1beta.ListSchemasRequest.class,
      responseType = com.google.cloud.discoveryengine.v1beta.ListSchemasResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1beta.ListSchemasRequest,
          com.google.cloud.discoveryengine.v1beta.ListSchemasResponse>
      getListSchemasMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1beta.ListSchemasRequest,
            com.google.cloud.discoveryengine.v1beta.ListSchemasResponse>
        getListSchemasMethod;
    // Lazily initialized; same double-checked-locking pattern as getGetSchemaMethod.
    if ((getListSchemasMethod = SchemaServiceGrpc.getListSchemasMethod) == null) {
      synchronized (SchemaServiceGrpc.class) {
        if ((getListSchemasMethod = SchemaServiceGrpc.getListSchemasMethod) == null) {
          SchemaServiceGrpc.getListSchemasMethod =
              getListSchemasMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1beta.ListSchemasRequest,
                          com.google.cloud.discoveryengine.v1beta.ListSchemasResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListSchemas"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1beta.ListSchemasRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1beta.ListSchemasResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(new SchemaServiceMethodDescriptorSupplier("ListSchemas"))
                      .build();
        }
      }
    }
    return getListSchemasMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest,
          com.google.longrunning.Operation>
      getCreateSchemaMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "CreateSchema",
      requestType = com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest,
          com.google.longrunning.Operation>
      getCreateSchemaMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest,
            com.google.longrunning.Operation>
        getCreateSchemaMethod;
    // Lazily initialized; same double-checked-locking pattern as getGetSchemaMethod.
    if ((getCreateSchemaMethod = SchemaServiceGrpc.getCreateSchemaMethod) == null) {
      synchronized (SchemaServiceGrpc.class) {
        if ((getCreateSchemaMethod = SchemaServiceGrpc.getCreateSchemaMethod) == null) {
          SchemaServiceGrpc.getCreateSchemaMethod =
              getCreateSchemaMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreateSchema"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new SchemaServiceMethodDescriptorSupplier("CreateSchema"))
                      .build();
        }
      }
    }
    return getCreateSchemaMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest,
          com.google.longrunning.Operation>
      getUpdateSchemaMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "UpdateSchema",
      requestType = com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest,
          com.google.longrunning.Operation>
      getUpdateSchemaMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest,
            com.google.longrunning.Operation>
        getUpdateSchemaMethod;
    // Lazily initialized; same double-checked-locking pattern as getGetSchemaMethod.
    if ((getUpdateSchemaMethod = SchemaServiceGrpc.getUpdateSchemaMethod) == null) {
      synchronized (SchemaServiceGrpc.class) {
        if ((getUpdateSchemaMethod = SchemaServiceGrpc.getUpdateSchemaMethod) == null) {
          SchemaServiceGrpc.getUpdateSchemaMethod =
              getUpdateSchemaMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdateSchema"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new SchemaServiceMethodDescriptorSupplier("UpdateSchema"))
                      .build();
        }
      }
    }
    return getUpdateSchemaMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest,
          com.google.longrunning.Operation>
      getDeleteSchemaMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DeleteSchema",
      requestType = com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest.class,
      responseType = com.google.longrunning.Operation.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest,
          com.google.longrunning.Operation>
      getDeleteSchemaMethod() {
    io.grpc.MethodDescriptor<
            com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest,
            com.google.longrunning.Operation>
        getDeleteSchemaMethod;
    // Lazily initialized; same double-checked-locking pattern as getGetSchemaMethod.
    if ((getDeleteSchemaMethod = SchemaServiceGrpc.getDeleteSchemaMethod) == null) {
      synchronized (SchemaServiceGrpc.class) {
        if ((getDeleteSchemaMethod = SchemaServiceGrpc.getDeleteSchemaMethod) == null) {
          SchemaServiceGrpc.getDeleteSchemaMethod =
              getDeleteSchemaMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest,
                          com.google.longrunning.Operation>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeleteSchema"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.longrunning.Operation.getDefaultInstance()))
                      .setSchemaDescriptor(
                          new SchemaServiceMethodDescriptorSupplier("DeleteSchema"))
                      .build();
        }
      }
    }
    return getDeleteSchemaMethod;
  }

  /** Creates a new async stub that supports all call types for the service */
  public static SchemaServiceStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<SchemaServiceStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<SchemaServiceStub>() {
          @java.lang.Override
          public SchemaServiceStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new SchemaServiceStub(channel, callOptions);
          }
        };
    return SchemaServiceStub.newStub(factory, channel);
  }

  /** Creates a new blocking-style stub that supports all types of calls on the service */
  public static SchemaServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<SchemaServiceBlockingV2Stub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<SchemaServiceBlockingV2Stub>() {
          @java.lang.Override
          public SchemaServiceBlockingV2Stub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new SchemaServiceBlockingV2Stub(channel, callOptions);
          }
        };
    return SchemaServiceBlockingV2Stub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static SchemaServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<SchemaServiceBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<SchemaServiceBlockingStub>() {
          @java.lang.Override
          public SchemaServiceBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new SchemaServiceBlockingStub(channel, callOptions);
          }
        };
    return SchemaServiceBlockingStub.newStub(factory, channel);
  }

  /** Creates a new ListenableFuture-style stub that supports unary calls on the service */
  public static SchemaServiceFutureStub newFutureStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<SchemaServiceFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<SchemaServiceFutureStub>() {
          @java.lang.Override
          public SchemaServiceFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new SchemaServiceFutureStub(channel, callOptions);
          }
        };
    return SchemaServiceFutureStub.newStub(factory, channel);
  }

  /**
   * Server-side contract for the service. Each method has a default implementation
   * that responds with UNIMPLEMENTED, so implementors only override what they support.
   *
   * <pre>
   * Service for managing [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
   * </pre>
   */
  public interface AsyncService {

    /**
     *
     *
     * <pre>
     * Gets a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    default void getSchema(
        com.google.cloud.discoveryengine.v1beta.GetSchemaRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1beta.Schema>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetSchemaMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Gets a list of [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
     * </pre>
     */
    default void listSchemas(
        com.google.cloud.discoveryengine.v1beta.ListSchemasRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1beta.ListSchemasResponse>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListSchemasMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Creates a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    default void createSchema(
        com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreateSchemaMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    default void updateSchema(
        com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getUpdateSchemaMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    default void deleteSchema(
        com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeleteSchemaMethod(), responseObserver);
    }
  }

  /**
   * Base class for the server implementation of the service SchemaService.
   *
   * <pre>
   * Service for managing [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
   * </pre>
   */
  public abstract static class SchemaServiceImplBase
      implements io.grpc.BindableService, AsyncService {

    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return SchemaServiceGrpc.bindService(this);
    }
  }

  /**
   * A stub to allow clients to do asynchronous rpc calls to service SchemaService.
   *
   * <pre>
   * Service for managing [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
   * </pre>
   */
  public static final class SchemaServiceStub
      extends io.grpc.stub.AbstractAsyncStub<SchemaServiceStub> {

    private SchemaServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected SchemaServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new SchemaServiceStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Gets a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public void getSchema(
        com.google.cloud.discoveryengine.v1beta.GetSchemaRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1beta.Schema>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetSchemaMethod(), getCallOptions()), request, responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Gets a list of [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
     * </pre>
     */
    public void listSchemas(
        com.google.cloud.discoveryengine.v1beta.ListSchemasRequest request,
        io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1beta.ListSchemasResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListSchemasMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Creates a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public void createSchema(
        com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateSchemaMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public void updateSchema(
        com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUpdateSchemaMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public void deleteSchema(
        com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest request,
        io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteSchemaMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }

  /**
   * A stub to allow clients to do synchronous rpc calls to service SchemaService.
   *
   * <pre>
   * Service for managing [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
   * </pre>
   */
  public static final class SchemaServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<SchemaServiceBlockingV2Stub> {

    private SchemaServiceBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected SchemaServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new SchemaServiceBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Gets a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public com.google.cloud.discoveryengine.v1beta.Schema getSchema(
        com.google.cloud.discoveryengine.v1beta.GetSchemaRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetSchemaMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Gets a list of [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
     * </pre>
     */
    public com.google.cloud.discoveryengine.v1beta.ListSchemasResponse listSchemas(
        com.google.cloud.discoveryengine.v1beta.ListSchemasRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListSchemasMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Creates a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public com.google.longrunning.Operation createSchema(
        com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateSchemaMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public com.google.longrunning.Operation updateSchema(
        com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateSchemaMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public com.google.longrunning.Operation deleteSchema(
        com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteSchemaMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do limited synchronous rpc calls to service SchemaService.
   *
   * <pre>
   * Service for managing [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
   * </pre>
   */
  public static final class SchemaServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<SchemaServiceBlockingStub> {

    private SchemaServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected SchemaServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new SchemaServiceBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Gets a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public com.google.cloud.discoveryengine.v1beta.Schema getSchema(
        com.google.cloud.discoveryengine.v1beta.GetSchemaRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetSchemaMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Gets a list of [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
     * </pre>
     */
    public com.google.cloud.discoveryengine.v1beta.ListSchemasResponse listSchemas(
        com.google.cloud.discoveryengine.v1beta.ListSchemasRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListSchemasMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Creates a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public com.google.longrunning.Operation createSchema(
        com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateSchemaMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public com.google.longrunning.Operation updateSchema(
        com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateSchemaMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public com.google.longrunning.Operation deleteSchema(
        com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteSchemaMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service SchemaService.
   *
   * <pre>
   * Service for managing [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
   * </pre>
   */
  public static final class SchemaServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<SchemaServiceFutureStub> {

    private SchemaServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected SchemaServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new SchemaServiceFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Gets a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.discoveryengine.v1beta.Schema>
        getSchema(com.google.cloud.discoveryengine.v1beta.GetSchemaRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetSchemaMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Gets a list of [Schema][google.cloud.discoveryengine.v1beta.Schema]s.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.cloud.discoveryengine.v1beta.ListSchemasResponse>
        listSchemas(com.google.cloud.discoveryengine.v1beta.ListSchemasRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListSchemasMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Creates a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        createSchema(com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateSchemaMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Updates a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        updateSchema(com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getUpdateSchemaMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes a [Schema][google.cloud.discoveryengine.v1beta.Schema].
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation>
        deleteSchema(com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteSchemaMethod(), getCallOptions()), request);
    }
  }

  // Dispatch ids used by MethodHandlers.invoke to route calls to the right RPC.
  private static final int METHODID_GET_SCHEMA = 0;
  private static final int METHODID_LIST_SCHEMAS = 1;
  private static final int METHODID_CREATE_SCHEMA = 2;
  private static final int METHODID_UPDATE_SCHEMA = 3;
  private static final int METHODID_DELETE_SCHEMA = 4;

  // Adapts an AsyncService implementation to the untyped call handler interfaces,
  // switching on methodId to invoke the correct RPC method.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_GET_SCHEMA:
          serviceImpl.getSchema(
              (com.google.cloud.discoveryengine.v1beta.GetSchemaRequest) request,
              (io.grpc.stub.StreamObserver<com.google.cloud.discoveryengine.v1beta.Schema>)
                  responseObserver);
          break;
        case METHODID_LIST_SCHEMAS:
          serviceImpl.listSchemas(
              (com.google.cloud.discoveryengine.v1beta.ListSchemasRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.cloud.discoveryengine.v1beta.ListSchemasResponse>)
                  responseObserver);
          break;
        case METHODID_CREATE_SCHEMA:
          serviceImpl.createSchema(
              (com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        case METHODID_UPDATE_SCHEMA:
          serviceImpl.updateSchema(
              (com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        case METHODID_DELETE_SCHEMA:
          serviceImpl.deleteSchema(
              (com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest) request,
              (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // This service declares no streaming methods, so any id reaching here is a bug.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }

  /** Builds a server service definition wiring each RPC to the given implementation. */
  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getGetSchemaMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1beta.GetSchemaRequest,
                    com.google.cloud.discoveryengine.v1beta.Schema>(service, METHODID_GET_SCHEMA)))
        .addMethod(
            getListSchemasMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1beta.ListSchemasRequest,
                    com.google.cloud.discoveryengine.v1beta.ListSchemasResponse>(
                    service, METHODID_LIST_SCHEMAS)))
        .addMethod(
            getCreateSchemaMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1beta.CreateSchemaRequest,
                    com.google.longrunning.Operation>(service, METHODID_CREATE_SCHEMA)))
        .addMethod(
            getUpdateSchemaMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1beta.UpdateSchemaRequest,
                    com.google.longrunning.Operation>(service, METHODID_UPDATE_SCHEMA)))
        .addMethod(
            getDeleteSchemaMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.cloud.discoveryengine.v1beta.DeleteSchemaRequest,
                    com.google.longrunning.Operation>(service, METHODID_DELETE_SCHEMA)))
        .build();
  }

  // Supplies proto descriptors so reflection/debug tooling can inspect the service.
  private abstract static class SchemaServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    SchemaServiceBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.cloud.discoveryengine.v1beta.SchemaServiceProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("SchemaService");
    }
  }

  private static final class SchemaServiceFileDescriptorSupplier
      extends SchemaServiceBaseDescriptorSupplier {
    SchemaServiceFileDescriptorSupplier() {}
  }

  private static final class SchemaServiceMethodDescriptorSupplier
      extends SchemaServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;

    SchemaServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }

  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  /** Lazily builds and caches the service descriptor (double-checked locking). */
  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (SchemaServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new SchemaServiceFileDescriptorSupplier())
                      .addMethod(getGetSchemaMethod())
                      .addMethod(getListSchemasMethod())
                      .addMethod(getCreateSchemaMethod())
                      .addMethod(getUpdateSchemaMethod())
                      .addMethod(getDeleteSchemaMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
|
googleads/google-ads-java | 37,132 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/TrendInsight.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/services/content_creator_insights_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.services;
/**
* <pre>
* A trend insight for a given attribute.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.TrendInsight}
*/
public final class TrendInsight extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.TrendInsight)
TrendInsightOrBuilder {
private static final long serialVersionUID = 0L;
// Use TrendInsight.newBuilder() to construct.
private TrendInsight(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used for the default instance; initializes the trend
// enum field to its zero value.
private TrendInsight() {
  trend_ = 0;
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new TrendInsight();
}

/** Returns the protobuf descriptor for this message type. */
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.ads.googleads.v19.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v19_services_TrendInsight_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.ads.googleads.v19.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v19_services_TrendInsight_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.ads.googleads.v19.services.TrendInsight.class, com.google.ads.googleads.v19.services.TrendInsight.Builder.class);
}

// Presence bits for optional message fields; bit 0x1 tracks trend_attribute.
private int bitField0_;
public static final int TREND_ATTRIBUTE_FIELD_NUMBER = 1;
private com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trendAttribute_;
/**
 * <pre>
 * The attribute this trend is for.
 * </pre>
 *
 * <code>.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
 * @return Whether the trendAttribute field is set.
 */
@java.lang.Override
public boolean hasTrendAttribute() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 * <pre>
 * The attribute this trend is for.
 * </pre>
 *
 * <code>.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
 * @return The trendAttribute, or the default instance if unset (never null).
 */
@java.lang.Override
public com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata getTrendAttribute() {
  return trendAttribute_ == null ? com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata.getDefaultInstance() : trendAttribute_;
}
/**
 * <pre>
 * The attribute this trend is for.
 * </pre>
 *
 * <code>.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
 */
@java.lang.Override
public com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadataOrBuilder getTrendAttributeOrBuilder() {
  return trendAttribute_ == null ? com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata.getDefaultInstance() : trendAttribute_;
}

public static final int TREND_METRICS_FIELD_NUMBER = 2;
private com.google.ads.googleads.v19.services.TrendInsightMetrics trendMetrics_;
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v19.services.TrendInsightMetrics trend_metrics = 2;</code>
* @return Whether the trendMetrics field is set.
*/
@java.lang.Override
public boolean hasTrendMetrics() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v19.services.TrendInsightMetrics trend_metrics = 2;</code>
* @return The trendMetrics.
*/
@java.lang.Override
public com.google.ads.googleads.v19.services.TrendInsightMetrics getTrendMetrics() {
return trendMetrics_ == null ? com.google.ads.googleads.v19.services.TrendInsightMetrics.getDefaultInstance() : trendMetrics_;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v19.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v19.services.TrendInsightMetricsOrBuilder getTrendMetricsOrBuilder() {
return trendMetrics_ == null ? com.google.ads.googleads.v19.services.TrendInsightMetrics.getDefaultInstance() : trendMetrics_;
}
public static final int TREND_FIELD_NUMBER = 3;
private int trend_ = 0;
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return The enum numeric value on the wire for trend.
*/
@java.lang.Override public int getTrendValue() {
return trend_;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return The trend.
*/
@java.lang.Override public com.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend getTrend() {
com.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend result = com.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend.forNumber(trend_);
return result == null ? com.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend.UNRECOGNIZED : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTrendAttribute());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getTrendMetrics());
}
if (trend_ != com.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend.UNSPECIFIED.getNumber()) {
output.writeEnum(3, trend_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getTrendAttribute());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getTrendMetrics());
}
if (trend_ != com.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend.UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(3, trend_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v19.services.TrendInsight)) {
return super.equals(obj);
}
com.google.ads.googleads.v19.services.TrendInsight other = (com.google.ads.googleads.v19.services.TrendInsight) obj;
if (hasTrendAttribute() != other.hasTrendAttribute()) return false;
if (hasTrendAttribute()) {
if (!getTrendAttribute()
.equals(other.getTrendAttribute())) return false;
}
if (hasTrendMetrics() != other.hasTrendMetrics()) return false;
if (hasTrendMetrics()) {
if (!getTrendMetrics()
.equals(other.getTrendMetrics())) return false;
}
if (trend_ != other.trend_) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTrendAttribute()) {
hash = (37 * hash) + TREND_ATTRIBUTE_FIELD_NUMBER;
hash = (53 * hash) + getTrendAttribute().hashCode();
}
if (hasTrendMetrics()) {
hash = (37 * hash) + TREND_METRICS_FIELD_NUMBER;
hash = (53 * hash) + getTrendMetrics().hashCode();
}
hash = (37 * hash) + TREND_FIELD_NUMBER;
hash = (53 * hash) + trend_;
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v19.services.TrendInsight parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.TrendInsight parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.TrendInsight parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.TrendInsight parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.TrendInsight parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.TrendInsight parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.TrendInsight parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.TrendInsight parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.TrendInsight parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.TrendInsight parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.TrendInsight parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.TrendInsight parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v19.services.TrendInsight prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A trend insight for a given attribute.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.TrendInsight}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.TrendInsight)
com.google.ads.googleads.v19.services.TrendInsightOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v19.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v19_services_TrendInsight_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v19.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v19_services_TrendInsight_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v19.services.TrendInsight.class, com.google.ads.googleads.v19.services.TrendInsight.Builder.class);
}
// Construct using com.google.ads.googleads.v19.services.TrendInsight.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTrendAttributeFieldBuilder();
getTrendMetricsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
trendAttribute_ = null;
if (trendAttributeBuilder_ != null) {
trendAttributeBuilder_.dispose();
trendAttributeBuilder_ = null;
}
trendMetrics_ = null;
if (trendMetricsBuilder_ != null) {
trendMetricsBuilder_.dispose();
trendMetricsBuilder_ = null;
}
trend_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v19.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v19_services_TrendInsight_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.TrendInsight getDefaultInstanceForType() {
return com.google.ads.googleads.v19.services.TrendInsight.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v19.services.TrendInsight build() {
com.google.ads.googleads.v19.services.TrendInsight result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.TrendInsight buildPartial() {
com.google.ads.googleads.v19.services.TrendInsight result = new com.google.ads.googleads.v19.services.TrendInsight(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v19.services.TrendInsight result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.trendAttribute_ = trendAttributeBuilder_ == null
? trendAttribute_
: trendAttributeBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.trendMetrics_ = trendMetricsBuilder_ == null
? trendMetrics_
: trendMetricsBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.trend_ = trend_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v19.services.TrendInsight) {
return mergeFrom((com.google.ads.googleads.v19.services.TrendInsight)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v19.services.TrendInsight other) {
if (other == com.google.ads.googleads.v19.services.TrendInsight.getDefaultInstance()) return this;
if (other.hasTrendAttribute()) {
mergeTrendAttribute(other.getTrendAttribute());
}
if (other.hasTrendMetrics()) {
mergeTrendMetrics(other.getTrendMetrics());
}
if (other.trend_ != 0) {
setTrendValue(other.getTrendValue());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getTrendAttributeFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
input.readMessage(
getTrendMetricsFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 24: {
trend_ = input.readEnum();
bitField0_ |= 0x00000004;
break;
} // case 24
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trendAttribute_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata, com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata.Builder, com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadataOrBuilder> trendAttributeBuilder_;
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
* @return Whether the trendAttribute field is set.
*/
public boolean hasTrendAttribute() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
* @return The trendAttribute.
*/
public com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata getTrendAttribute() {
if (trendAttributeBuilder_ == null) {
return trendAttribute_ == null ? com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata.getDefaultInstance() : trendAttribute_;
} else {
return trendAttributeBuilder_.getMessage();
}
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public Builder setTrendAttribute(com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata value) {
if (trendAttributeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
trendAttribute_ = value;
} else {
trendAttributeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public Builder setTrendAttribute(
com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata.Builder builderForValue) {
if (trendAttributeBuilder_ == null) {
trendAttribute_ = builderForValue.build();
} else {
trendAttributeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public Builder mergeTrendAttribute(com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata value) {
if (trendAttributeBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
trendAttribute_ != null &&
trendAttribute_ != com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata.getDefaultInstance()) {
getTrendAttributeBuilder().mergeFrom(value);
} else {
trendAttribute_ = value;
}
} else {
trendAttributeBuilder_.mergeFrom(value);
}
if (trendAttribute_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public Builder clearTrendAttribute() {
bitField0_ = (bitField0_ & ~0x00000001);
trendAttribute_ = null;
if (trendAttributeBuilder_ != null) {
trendAttributeBuilder_.dispose();
trendAttributeBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata.Builder getTrendAttributeBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTrendAttributeFieldBuilder().getBuilder();
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadataOrBuilder getTrendAttributeOrBuilder() {
if (trendAttributeBuilder_ != null) {
return trendAttributeBuilder_.getMessageOrBuilder();
} else {
return trendAttribute_ == null ?
com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata.getDefaultInstance() : trendAttribute_;
}
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata, com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata.Builder, com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadataOrBuilder>
getTrendAttributeFieldBuilder() {
if (trendAttributeBuilder_ == null) {
trendAttributeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata, com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadata.Builder, com.google.ads.googleads.v19.common.AudienceInsightsAttributeMetadataOrBuilder>(
getTrendAttribute(),
getParentForChildren(),
isClean());
trendAttribute_ = null;
}
return trendAttributeBuilder_;
}
private com.google.ads.googleads.v19.services.TrendInsightMetrics trendMetrics_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v19.services.TrendInsightMetrics, com.google.ads.googleads.v19.services.TrendInsightMetrics.Builder, com.google.ads.googleads.v19.services.TrendInsightMetricsOrBuilder> trendMetricsBuilder_;
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v19.services.TrendInsightMetrics trend_metrics = 2;</code>
* @return Whether the trendMetrics field is set.
*/
public boolean hasTrendMetrics() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v19.services.TrendInsightMetrics trend_metrics = 2;</code>
* @return The trendMetrics.
*/
public com.google.ads.googleads.v19.services.TrendInsightMetrics getTrendMetrics() {
if (trendMetricsBuilder_ == null) {
return trendMetrics_ == null ? com.google.ads.googleads.v19.services.TrendInsightMetrics.getDefaultInstance() : trendMetrics_;
} else {
return trendMetricsBuilder_.getMessage();
}
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v19.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public Builder setTrendMetrics(com.google.ads.googleads.v19.services.TrendInsightMetrics value) {
if (trendMetricsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
trendMetrics_ = value;
} else {
trendMetricsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v19.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public Builder setTrendMetrics(
com.google.ads.googleads.v19.services.TrendInsightMetrics.Builder builderForValue) {
if (trendMetricsBuilder_ == null) {
trendMetrics_ = builderForValue.build();
} else {
trendMetricsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v19.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public Builder mergeTrendMetrics(com.google.ads.googleads.v19.services.TrendInsightMetrics value) {
if (trendMetricsBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0) &&
trendMetrics_ != null &&
trendMetrics_ != com.google.ads.googleads.v19.services.TrendInsightMetrics.getDefaultInstance()) {
getTrendMetricsBuilder().mergeFrom(value);
} else {
trendMetrics_ = value;
}
} else {
trendMetricsBuilder_.mergeFrom(value);
}
if (trendMetrics_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v19.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public Builder clearTrendMetrics() {
bitField0_ = (bitField0_ & ~0x00000002);
trendMetrics_ = null;
if (trendMetricsBuilder_ != null) {
trendMetricsBuilder_.dispose();
trendMetricsBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v19.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public com.google.ads.googleads.v19.services.TrendInsightMetrics.Builder getTrendMetricsBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTrendMetricsFieldBuilder().getBuilder();
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v19.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public com.google.ads.googleads.v19.services.TrendInsightMetricsOrBuilder getTrendMetricsOrBuilder() {
if (trendMetricsBuilder_ != null) {
return trendMetricsBuilder_.getMessageOrBuilder();
} else {
return trendMetrics_ == null ?
com.google.ads.googleads.v19.services.TrendInsightMetrics.getDefaultInstance() : trendMetrics_;
}
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v19.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v19.services.TrendInsightMetrics, com.google.ads.googleads.v19.services.TrendInsightMetrics.Builder, com.google.ads.googleads.v19.services.TrendInsightMetricsOrBuilder>
getTrendMetricsFieldBuilder() {
if (trendMetricsBuilder_ == null) {
trendMetricsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v19.services.TrendInsightMetrics, com.google.ads.googleads.v19.services.TrendInsightMetrics.Builder, com.google.ads.googleads.v19.services.TrendInsightMetricsOrBuilder>(
getTrendMetrics(),
getParentForChildren(),
isClean());
trendMetrics_ = null;
}
return trendMetricsBuilder_;
}
private int trend_ = 0;
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return The enum numeric value on the wire for trend.
*/
@java.lang.Override public int getTrendValue() {
return trend_;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @param value The enum numeric value on the wire for trend to set.
* @return This builder for chaining.
*/
public Builder setTrendValue(int value) {
trend_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return The trend.
*/
@java.lang.Override
public com.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend getTrend() {
com.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend result = com.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend.forNumber(trend_);
return result == null ? com.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend.UNRECOGNIZED : result;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @param value The trend to set.
* @return This builder for chaining.
*/
public Builder setTrend(com.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
trend_ = value.getNumber();
onChanged();
return this;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v19.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return This builder for chaining.
*/
public Builder clearTrend() {
bitField0_ = (bitField0_ & ~0x00000004);
trend_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.TrendInsight)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.TrendInsight)
private static final com.google.ads.googleads.v19.services.TrendInsight DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.TrendInsight();
}
public static com.google.ads.googleads.v19.services.TrendInsight getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<TrendInsight>
PARSER = new com.google.protobuf.AbstractParser<TrendInsight>() {
@java.lang.Override
public TrendInsight parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TrendInsight> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TrendInsight> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.TrendInsight getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleads/google-ads-java | 37,132 | google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/TrendInsight.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/services/content_creator_insights_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.services;
/**
* <pre>
* A trend insight for a given attribute.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.TrendInsight}
*/
public final class TrendInsight extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.TrendInsight)
TrendInsightOrBuilder {
private static final long serialVersionUID = 0L;
// Use TrendInsight.newBuilder() to construct.
private TrendInsight(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TrendInsight() {
trend_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new TrendInsight();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v20_services_TrendInsight_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v20_services_TrendInsight_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.TrendInsight.class, com.google.ads.googleads.v20.services.TrendInsight.Builder.class);
}
private int bitField0_;
public static final int TREND_ATTRIBUTE_FIELD_NUMBER = 1;
private com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trendAttribute_;
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
* @return Whether the trendAttribute field is set.
*/
@java.lang.Override
public boolean hasTrendAttribute() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
* @return The trendAttribute.
*/
@java.lang.Override
public com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata getTrendAttribute() {
return trendAttribute_ == null ? com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata.getDefaultInstance() : trendAttribute_;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadataOrBuilder getTrendAttributeOrBuilder() {
return trendAttribute_ == null ? com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata.getDefaultInstance() : trendAttribute_;
}
public static final int TREND_METRICS_FIELD_NUMBER = 2;
private com.google.ads.googleads.v20.services.TrendInsightMetrics trendMetrics_;
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v20.services.TrendInsightMetrics trend_metrics = 2;</code>
* @return Whether the trendMetrics field is set.
*/
@java.lang.Override
public boolean hasTrendMetrics() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v20.services.TrendInsightMetrics trend_metrics = 2;</code>
* @return The trendMetrics.
*/
@java.lang.Override
public com.google.ads.googleads.v20.services.TrendInsightMetrics getTrendMetrics() {
return trendMetrics_ == null ? com.google.ads.googleads.v20.services.TrendInsightMetrics.getDefaultInstance() : trendMetrics_;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v20.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v20.services.TrendInsightMetricsOrBuilder getTrendMetricsOrBuilder() {
return trendMetrics_ == null ? com.google.ads.googleads.v20.services.TrendInsightMetrics.getDefaultInstance() : trendMetrics_;
}
public static final int TREND_FIELD_NUMBER = 3;
private int trend_ = 0;
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return The enum numeric value on the wire for trend.
*/
@java.lang.Override public int getTrendValue() {
return trend_;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return The trend.
*/
@java.lang.Override public com.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend getTrend() {
com.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend result = com.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend.forNumber(trend_);
return result == null ? com.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend.UNRECOGNIZED : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTrendAttribute());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getTrendMetrics());
}
if (trend_ != com.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend.UNSPECIFIED.getNumber()) {
output.writeEnum(3, trend_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getTrendAttribute());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getTrendMetrics());
}
if (trend_ != com.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend.UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(3, trend_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v20.services.TrendInsight)) {
return super.equals(obj);
}
com.google.ads.googleads.v20.services.TrendInsight other = (com.google.ads.googleads.v20.services.TrendInsight) obj;
if (hasTrendAttribute() != other.hasTrendAttribute()) return false;
if (hasTrendAttribute()) {
if (!getTrendAttribute()
.equals(other.getTrendAttribute())) return false;
}
if (hasTrendMetrics() != other.hasTrendMetrics()) return false;
if (hasTrendMetrics()) {
if (!getTrendMetrics()
.equals(other.getTrendMetrics())) return false;
}
if (trend_ != other.trend_) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTrendAttribute()) {
hash = (37 * hash) + TREND_ATTRIBUTE_FIELD_NUMBER;
hash = (53 * hash) + getTrendAttribute().hashCode();
}
if (hasTrendMetrics()) {
hash = (37 * hash) + TREND_METRICS_FIELD_NUMBER;
hash = (53 * hash) + getTrendMetrics().hashCode();
}
hash = (37 * hash) + TREND_FIELD_NUMBER;
hash = (53 * hash) + trend_;
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v20.services.TrendInsight parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.TrendInsight parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.TrendInsight parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.TrendInsight parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.TrendInsight parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.TrendInsight parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.TrendInsight parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.TrendInsight parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.TrendInsight parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.TrendInsight parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.TrendInsight parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.TrendInsight parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v20.services.TrendInsight prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A trend insight for a given attribute.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.TrendInsight}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.TrendInsight)
com.google.ads.googleads.v20.services.TrendInsightOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v20_services_TrendInsight_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v20_services_TrendInsight_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.TrendInsight.class, com.google.ads.googleads.v20.services.TrendInsight.Builder.class);
}
// Construct using com.google.ads.googleads.v20.services.TrendInsight.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTrendAttributeFieldBuilder();
getTrendMetricsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
trendAttribute_ = null;
if (trendAttributeBuilder_ != null) {
trendAttributeBuilder_.dispose();
trendAttributeBuilder_ = null;
}
trendMetrics_ = null;
if (trendMetricsBuilder_ != null) {
trendMetricsBuilder_.dispose();
trendMetricsBuilder_ = null;
}
trend_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v20_services_TrendInsight_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.TrendInsight getDefaultInstanceForType() {
return com.google.ads.googleads.v20.services.TrendInsight.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.services.TrendInsight build() {
com.google.ads.googleads.v20.services.TrendInsight result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.TrendInsight buildPartial() {
com.google.ads.googleads.v20.services.TrendInsight result = new com.google.ads.googleads.v20.services.TrendInsight(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v20.services.TrendInsight result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.trendAttribute_ = trendAttributeBuilder_ == null
? trendAttribute_
: trendAttributeBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.trendMetrics_ = trendMetricsBuilder_ == null
? trendMetrics_
: trendMetricsBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.trend_ = trend_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.services.TrendInsight) {
return mergeFrom((com.google.ads.googleads.v20.services.TrendInsight)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v20.services.TrendInsight other) {
if (other == com.google.ads.googleads.v20.services.TrendInsight.getDefaultInstance()) return this;
if (other.hasTrendAttribute()) {
mergeTrendAttribute(other.getTrendAttribute());
}
if (other.hasTrendMetrics()) {
mergeTrendMetrics(other.getTrendMetrics());
}
if (other.trend_ != 0) {
setTrendValue(other.getTrendValue());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getTrendAttributeFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
input.readMessage(
getTrendMetricsFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 24: {
trend_ = input.readEnum();
bitField0_ |= 0x00000004;
break;
} // case 24
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trendAttribute_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata, com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata.Builder, com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadataOrBuilder> trendAttributeBuilder_;
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
* @return Whether the trendAttribute field is set.
*/
public boolean hasTrendAttribute() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
* @return The trendAttribute.
*/
public com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata getTrendAttribute() {
if (trendAttributeBuilder_ == null) {
return trendAttribute_ == null ? com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata.getDefaultInstance() : trendAttribute_;
} else {
return trendAttributeBuilder_.getMessage();
}
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public Builder setTrendAttribute(com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata value) {
if (trendAttributeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
trendAttribute_ = value;
} else {
trendAttributeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public Builder setTrendAttribute(
com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata.Builder builderForValue) {
if (trendAttributeBuilder_ == null) {
trendAttribute_ = builderForValue.build();
} else {
trendAttributeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public Builder mergeTrendAttribute(com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata value) {
if (trendAttributeBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
trendAttribute_ != null &&
trendAttribute_ != com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata.getDefaultInstance()) {
getTrendAttributeBuilder().mergeFrom(value);
} else {
trendAttribute_ = value;
}
} else {
trendAttributeBuilder_.mergeFrom(value);
}
if (trendAttribute_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public Builder clearTrendAttribute() {
bitField0_ = (bitField0_ & ~0x00000001);
trendAttribute_ = null;
if (trendAttributeBuilder_ != null) {
trendAttributeBuilder_.dispose();
trendAttributeBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata.Builder getTrendAttributeBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTrendAttributeFieldBuilder().getBuilder();
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadataOrBuilder getTrendAttributeOrBuilder() {
if (trendAttributeBuilder_ != null) {
return trendAttributeBuilder_.getMessageOrBuilder();
} else {
return trendAttribute_ == null ?
com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata.getDefaultInstance() : trendAttribute_;
}
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata, com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata.Builder, com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadataOrBuilder>
getTrendAttributeFieldBuilder() {
if (trendAttributeBuilder_ == null) {
trendAttributeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata, com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadata.Builder, com.google.ads.googleads.v20.common.AudienceInsightsAttributeMetadataOrBuilder>(
getTrendAttribute(),
getParentForChildren(),
isClean());
trendAttribute_ = null;
}
return trendAttributeBuilder_;
}
private com.google.ads.googleads.v20.services.TrendInsightMetrics trendMetrics_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.services.TrendInsightMetrics, com.google.ads.googleads.v20.services.TrendInsightMetrics.Builder, com.google.ads.googleads.v20.services.TrendInsightMetricsOrBuilder> trendMetricsBuilder_;
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v20.services.TrendInsightMetrics trend_metrics = 2;</code>
* @return Whether the trendMetrics field is set.
*/
public boolean hasTrendMetrics() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v20.services.TrendInsightMetrics trend_metrics = 2;</code>
* @return The trendMetrics.
*/
public com.google.ads.googleads.v20.services.TrendInsightMetrics getTrendMetrics() {
if (trendMetricsBuilder_ == null) {
return trendMetrics_ == null ? com.google.ads.googleads.v20.services.TrendInsightMetrics.getDefaultInstance() : trendMetrics_;
} else {
return trendMetricsBuilder_.getMessage();
}
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v20.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public Builder setTrendMetrics(com.google.ads.googleads.v20.services.TrendInsightMetrics value) {
if (trendMetricsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
trendMetrics_ = value;
} else {
trendMetricsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v20.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public Builder setTrendMetrics(
com.google.ads.googleads.v20.services.TrendInsightMetrics.Builder builderForValue) {
if (trendMetricsBuilder_ == null) {
trendMetrics_ = builderForValue.build();
} else {
trendMetricsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v20.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public Builder mergeTrendMetrics(com.google.ads.googleads.v20.services.TrendInsightMetrics value) {
if (trendMetricsBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0) &&
trendMetrics_ != null &&
trendMetrics_ != com.google.ads.googleads.v20.services.TrendInsightMetrics.getDefaultInstance()) {
getTrendMetricsBuilder().mergeFrom(value);
} else {
trendMetrics_ = value;
}
} else {
trendMetricsBuilder_.mergeFrom(value);
}
if (trendMetrics_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v20.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public Builder clearTrendMetrics() {
bitField0_ = (bitField0_ & ~0x00000002);
trendMetrics_ = null;
if (trendMetricsBuilder_ != null) {
trendMetricsBuilder_.dispose();
trendMetricsBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v20.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public com.google.ads.googleads.v20.services.TrendInsightMetrics.Builder getTrendMetricsBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTrendMetricsFieldBuilder().getBuilder();
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v20.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public com.google.ads.googleads.v20.services.TrendInsightMetricsOrBuilder getTrendMetricsOrBuilder() {
if (trendMetricsBuilder_ != null) {
return trendMetricsBuilder_.getMessageOrBuilder();
} else {
return trendMetrics_ == null ?
com.google.ads.googleads.v20.services.TrendInsightMetrics.getDefaultInstance() : trendMetrics_;
}
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v20.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.services.TrendInsightMetrics, com.google.ads.googleads.v20.services.TrendInsightMetrics.Builder, com.google.ads.googleads.v20.services.TrendInsightMetricsOrBuilder>
getTrendMetricsFieldBuilder() {
if (trendMetricsBuilder_ == null) {
trendMetricsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v20.services.TrendInsightMetrics, com.google.ads.googleads.v20.services.TrendInsightMetrics.Builder, com.google.ads.googleads.v20.services.TrendInsightMetricsOrBuilder>(
getTrendMetrics(),
getParentForChildren(),
isClean());
trendMetrics_ = null;
}
return trendMetricsBuilder_;
}
private int trend_ = 0;
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return The enum numeric value on the wire for trend.
*/
@java.lang.Override public int getTrendValue() {
return trend_;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @param value The enum numeric value on the wire for trend to set.
* @return This builder for chaining.
*/
public Builder setTrendValue(int value) {
trend_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return The trend.
*/
@java.lang.Override
public com.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend getTrend() {
com.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend result = com.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend.forNumber(trend_);
return result == null ? com.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend.UNRECOGNIZED : result;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @param value The trend to set.
* @return This builder for chaining.
*/
public Builder setTrend(com.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
trend_ = value.getNumber();
onChanged();
return this;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v20.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return This builder for chaining.
*/
public Builder clearTrend() {
bitField0_ = (bitField0_ & ~0x00000004);
trend_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.TrendInsight)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.TrendInsight)
private static final com.google.ads.googleads.v20.services.TrendInsight DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.TrendInsight();
}
public static com.google.ads.googleads.v20.services.TrendInsight getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<TrendInsight>
PARSER = new com.google.protobuf.AbstractParser<TrendInsight>() {
@java.lang.Override
public TrendInsight parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TrendInsight> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TrendInsight> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.TrendInsight getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleads/google-ads-java | 37,132 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/TrendInsight.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/services/content_creator_insights_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* A trend insight for a given attribute.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.TrendInsight}
*/
public final class TrendInsight extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.TrendInsight)
TrendInsightOrBuilder {
private static final long serialVersionUID = 0L;
// Use TrendInsight.newBuilder() to construct.
private TrendInsight(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private TrendInsight() {
trend_ = 0;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new TrendInsight();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v21_services_TrendInsight_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v21_services_TrendInsight_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.TrendInsight.class, com.google.ads.googleads.v21.services.TrendInsight.Builder.class);
}
private int bitField0_;
public static final int TREND_ATTRIBUTE_FIELD_NUMBER = 1;
private com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trendAttribute_;
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
* @return Whether the trendAttribute field is set.
*/
@java.lang.Override
public boolean hasTrendAttribute() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
* @return The trendAttribute.
*/
@java.lang.Override
public com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata getTrendAttribute() {
return trendAttribute_ == null ? com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata.getDefaultInstance() : trendAttribute_;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadataOrBuilder getTrendAttributeOrBuilder() {
return trendAttribute_ == null ? com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata.getDefaultInstance() : trendAttribute_;
}
public static final int TREND_METRICS_FIELD_NUMBER = 2;
private com.google.ads.googleads.v21.services.TrendInsightMetrics trendMetrics_;
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v21.services.TrendInsightMetrics trend_metrics = 2;</code>
* @return Whether the trendMetrics field is set.
*/
@java.lang.Override
public boolean hasTrendMetrics() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v21.services.TrendInsightMetrics trend_metrics = 2;</code>
* @return The trendMetrics.
*/
@java.lang.Override
public com.google.ads.googleads.v21.services.TrendInsightMetrics getTrendMetrics() {
return trendMetrics_ == null ? com.google.ads.googleads.v21.services.TrendInsightMetrics.getDefaultInstance() : trendMetrics_;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v21.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v21.services.TrendInsightMetricsOrBuilder getTrendMetricsOrBuilder() {
return trendMetrics_ == null ? com.google.ads.googleads.v21.services.TrendInsightMetrics.getDefaultInstance() : trendMetrics_;
}
public static final int TREND_FIELD_NUMBER = 3;
private int trend_ = 0;
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return The enum numeric value on the wire for trend.
*/
@java.lang.Override public int getTrendValue() {
return trend_;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return The trend.
*/
@java.lang.Override public com.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend getTrend() {
com.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend result = com.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend.forNumber(trend_);
return result == null ? com.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend.UNRECOGNIZED : result;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getTrendAttribute());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getTrendMetrics());
}
if (trend_ != com.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend.UNSPECIFIED.getNumber()) {
output.writeEnum(3, trend_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, getTrendAttribute());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(2, getTrendMetrics());
}
if (trend_ != com.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend.UNSPECIFIED.getNumber()) {
size += com.google.protobuf.CodedOutputStream
.computeEnumSize(3, trend_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.services.TrendInsight)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.services.TrendInsight other = (com.google.ads.googleads.v21.services.TrendInsight) obj;
if (hasTrendAttribute() != other.hasTrendAttribute()) return false;
if (hasTrendAttribute()) {
if (!getTrendAttribute()
.equals(other.getTrendAttribute())) return false;
}
if (hasTrendMetrics() != other.hasTrendMetrics()) return false;
if (hasTrendMetrics()) {
if (!getTrendMetrics()
.equals(other.getTrendMetrics())) return false;
}
if (trend_ != other.trend_) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasTrendAttribute()) {
hash = (37 * hash) + TREND_ATTRIBUTE_FIELD_NUMBER;
hash = (53 * hash) + getTrendAttribute().hashCode();
}
if (hasTrendMetrics()) {
hash = (37 * hash) + TREND_METRICS_FIELD_NUMBER;
hash = (53 * hash) + getTrendMetrics().hashCode();
}
hash = (37 * hash) + TREND_FIELD_NUMBER;
hash = (53 * hash) + trend_;
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.services.TrendInsight parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.TrendInsight parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.TrendInsight parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.TrendInsight parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.TrendInsight parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.TrendInsight parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.TrendInsight parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.TrendInsight parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.TrendInsight parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.TrendInsight parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.TrendInsight parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.TrendInsight parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.services.TrendInsight prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* A trend insight for a given attribute.
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.TrendInsight}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.TrendInsight)
com.google.ads.googleads.v21.services.TrendInsightOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v21_services_TrendInsight_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v21_services_TrendInsight_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.TrendInsight.class, com.google.ads.googleads.v21.services.TrendInsight.Builder.class);
}
// Construct using com.google.ads.googleads.v21.services.TrendInsight.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3
.alwaysUseFieldBuilders) {
getTrendAttributeFieldBuilder();
getTrendMetricsFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
trendAttribute_ = null;
if (trendAttributeBuilder_ != null) {
trendAttributeBuilder_.dispose();
trendAttributeBuilder_ = null;
}
trendMetrics_ = null;
if (trendMetricsBuilder_ != null) {
trendMetricsBuilder_.dispose();
trendMetricsBuilder_ = null;
}
trend_ = 0;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.services.ContentCreatorInsightsServiceProto.internal_static_google_ads_googleads_v21_services_TrendInsight_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.TrendInsight getDefaultInstanceForType() {
return com.google.ads.googleads.v21.services.TrendInsight.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.services.TrendInsight build() {
com.google.ads.googleads.v21.services.TrendInsight result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.TrendInsight buildPartial() {
com.google.ads.googleads.v21.services.TrendInsight result = new com.google.ads.googleads.v21.services.TrendInsight(this);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.googleads.v21.services.TrendInsight result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.trendAttribute_ = trendAttributeBuilder_ == null
? trendAttribute_
: trendAttributeBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.trendMetrics_ = trendMetricsBuilder_ == null
? trendMetrics_
: trendMetricsBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.trend_ = trend_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.services.TrendInsight) {
return mergeFrom((com.google.ads.googleads.v21.services.TrendInsight)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.services.TrendInsight other) {
if (other == com.google.ads.googleads.v21.services.TrendInsight.getDefaultInstance()) return this;
if (other.hasTrendAttribute()) {
mergeTrendAttribute(other.getTrendAttribute());
}
if (other.hasTrendMetrics()) {
mergeTrendMetrics(other.getTrendMetrics());
}
if (other.trend_ != 0) {
setTrendValue(other.getTrendValue());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
input.readMessage(
getTrendAttributeFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18: {
input.readMessage(
getTrendMetricsFieldBuilder().getBuilder(),
extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 24: {
trend_ = input.readEnum();
bitField0_ |= 0x00000004;
break;
} // case 24
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trendAttribute_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata, com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata.Builder, com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadataOrBuilder> trendAttributeBuilder_;
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
* @return Whether the trendAttribute field is set.
*/
public boolean hasTrendAttribute() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
* @return The trendAttribute.
*/
public com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata getTrendAttribute() {
if (trendAttributeBuilder_ == null) {
return trendAttribute_ == null ? com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata.getDefaultInstance() : trendAttribute_;
} else {
return trendAttributeBuilder_.getMessage();
}
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public Builder setTrendAttribute(com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata value) {
if (trendAttributeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
trendAttribute_ = value;
} else {
trendAttributeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public Builder setTrendAttribute(
com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata.Builder builderForValue) {
if (trendAttributeBuilder_ == null) {
trendAttribute_ = builderForValue.build();
} else {
trendAttributeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public Builder mergeTrendAttribute(com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata value) {
if (trendAttributeBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0) &&
trendAttribute_ != null &&
trendAttribute_ != com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata.getDefaultInstance()) {
getTrendAttributeBuilder().mergeFrom(value);
} else {
trendAttribute_ = value;
}
} else {
trendAttributeBuilder_.mergeFrom(value);
}
if (trendAttribute_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public Builder clearTrendAttribute() {
bitField0_ = (bitField0_ & ~0x00000001);
trendAttribute_ = null;
if (trendAttributeBuilder_ != null) {
trendAttributeBuilder_.dispose();
trendAttributeBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata.Builder getTrendAttributeBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getTrendAttributeFieldBuilder().getBuilder();
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
public com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadataOrBuilder getTrendAttributeOrBuilder() {
if (trendAttributeBuilder_ != null) {
return trendAttributeBuilder_.getMessageOrBuilder();
} else {
return trendAttribute_ == null ?
com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata.getDefaultInstance() : trendAttribute_;
}
}
/**
* <pre>
* The attribute this trend is for.
* </pre>
*
* <code>.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata trend_attribute = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata, com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata.Builder, com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadataOrBuilder>
getTrendAttributeFieldBuilder() {
if (trendAttributeBuilder_ == null) {
trendAttributeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata, com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadata.Builder, com.google.ads.googleads.v21.common.AudienceInsightsAttributeMetadataOrBuilder>(
getTrendAttribute(),
getParentForChildren(),
isClean());
trendAttribute_ = null;
}
return trendAttributeBuilder_;
}
private com.google.ads.googleads.v21.services.TrendInsightMetrics trendMetrics_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.services.TrendInsightMetrics, com.google.ads.googleads.v21.services.TrendInsightMetrics.Builder, com.google.ads.googleads.v21.services.TrendInsightMetricsOrBuilder> trendMetricsBuilder_;
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v21.services.TrendInsightMetrics trend_metrics = 2;</code>
* @return Whether the trendMetrics field is set.
*/
public boolean hasTrendMetrics() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v21.services.TrendInsightMetrics trend_metrics = 2;</code>
* @return The trendMetrics.
*/
public com.google.ads.googleads.v21.services.TrendInsightMetrics getTrendMetrics() {
if (trendMetricsBuilder_ == null) {
return trendMetrics_ == null ? com.google.ads.googleads.v21.services.TrendInsightMetrics.getDefaultInstance() : trendMetrics_;
} else {
return trendMetricsBuilder_.getMessage();
}
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v21.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public Builder setTrendMetrics(com.google.ads.googleads.v21.services.TrendInsightMetrics value) {
if (trendMetricsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
trendMetrics_ = value;
} else {
trendMetricsBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v21.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public Builder setTrendMetrics(
com.google.ads.googleads.v21.services.TrendInsightMetrics.Builder builderForValue) {
if (trendMetricsBuilder_ == null) {
trendMetrics_ = builderForValue.build();
} else {
trendMetricsBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v21.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public Builder mergeTrendMetrics(com.google.ads.googleads.v21.services.TrendInsightMetrics value) {
if (trendMetricsBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0) &&
trendMetrics_ != null &&
trendMetrics_ != com.google.ads.googleads.v21.services.TrendInsightMetrics.getDefaultInstance()) {
getTrendMetricsBuilder().mergeFrom(value);
} else {
trendMetrics_ = value;
}
} else {
trendMetricsBuilder_.mergeFrom(value);
}
if (trendMetrics_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v21.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public Builder clearTrendMetrics() {
bitField0_ = (bitField0_ & ~0x00000002);
trendMetrics_ = null;
if (trendMetricsBuilder_ != null) {
trendMetricsBuilder_.dispose();
trendMetricsBuilder_ = null;
}
onChanged();
return this;
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v21.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public com.google.ads.googleads.v21.services.TrendInsightMetrics.Builder getTrendMetricsBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getTrendMetricsFieldBuilder().getBuilder();
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v21.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
public com.google.ads.googleads.v21.services.TrendInsightMetricsOrBuilder getTrendMetricsOrBuilder() {
if (trendMetricsBuilder_ != null) {
return trendMetricsBuilder_.getMessageOrBuilder();
} else {
return trendMetrics_ == null ?
com.google.ads.googleads.v21.services.TrendInsightMetrics.getDefaultInstance() : trendMetrics_;
}
}
/**
* <pre>
* Metrics associated with this trend.
* </pre>
*
* <code>.google.ads.googleads.v21.services.TrendInsightMetrics trend_metrics = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.services.TrendInsightMetrics, com.google.ads.googleads.v21.services.TrendInsightMetrics.Builder, com.google.ads.googleads.v21.services.TrendInsightMetricsOrBuilder>
getTrendMetricsFieldBuilder() {
if (trendMetricsBuilder_ == null) {
trendMetricsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3<
com.google.ads.googleads.v21.services.TrendInsightMetrics, com.google.ads.googleads.v21.services.TrendInsightMetrics.Builder, com.google.ads.googleads.v21.services.TrendInsightMetricsOrBuilder>(
getTrendMetrics(),
getParentForChildren(),
isClean());
trendMetrics_ = null;
}
return trendMetricsBuilder_;
}
private int trend_ = 0;
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return The enum numeric value on the wire for trend.
*/
@java.lang.Override public int getTrendValue() {
return trend_;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @param value The enum numeric value on the wire for trend to set.
* @return This builder for chaining.
*/
public Builder setTrendValue(int value) {
trend_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return The trend.
*/
@java.lang.Override
public com.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend getTrend() {
com.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend result = com.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend.forNumber(trend_);
return result == null ? com.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend.UNRECOGNIZED : result;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @param value The trend to set.
* @return This builder for chaining.
*/
public Builder setTrend(com.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000004;
trend_ = value.getNumber();
onChanged();
return this;
}
/**
* <pre>
* The direction of trend (such as RISING or DECLINING).
* </pre>
*
* <code>.google.ads.googleads.v21.enums.InsightsTrendEnum.InsightsTrend trend = 3;</code>
* @return This builder for chaining.
*/
public Builder clearTrend() {
bitField0_ = (bitField0_ & ~0x00000004);
trend_ = 0;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.TrendInsight)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.TrendInsight)
private static final com.google.ads.googleads.v21.services.TrendInsight DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.TrendInsight();
}
public static com.google.ads.googleads.v21.services.TrendInsight getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<TrendInsight>
PARSER = new com.google.protobuf.AbstractParser<TrendInsight>() {
@java.lang.Override
public TrendInsight parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TrendInsight> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TrendInsight> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.TrendInsight getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,959 | java-bigquery-data-exchange/proto-google-cloud-bigquery-data-exchange-v1beta1/src/main/java/com/google/cloud/bigquery/dataexchange/v1beta1/UpdateDataExchangeRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/dataexchange/v1beta1/dataexchange.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.dataexchange.v1beta1;
/**
*
*
* <pre>
* Message for updating a data exchange.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest}
*/
// NOTE(review): protoc-generated code (protobuf-java 3.25.8) — do not hand-edit; regenerate
// from google/cloud/bigquery/dataexchange/v1beta1/dataexchange.proto if changes are needed.
public final class UpdateDataExchangeRequest extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest)
    UpdateDataExchangeRequestOrBuilder {
  private static final long serialVersionUID = 0L;

  // Use UpdateDataExchangeRequest.newBuilder() to construct.
  private UpdateDataExchangeRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  private UpdateDataExchangeRequest() {}

  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateDataExchangeRequest();
  }

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
        .internal_static_google_cloud_bigquery_dataexchange_v1beta1_UpdateDataExchangeRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
        .internal_static_google_cloud_bigquery_dataexchange_v1beta1_UpdateDataExchangeRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest.class,
            com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest.Builder
                .class);
  }

  // Explicit-presence bits: 0x00000001 = update_mask set, 0x00000002 = data_exchange set.
  private int bitField0_;
  public static final int UPDATE_MASK_FIELD_NUMBER = 1;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Required. Field mask specifies the fields to update in the data exchange
   * resource. The fields specified in the
   * `updateMask` are relative to the resource and are not a full request.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Field mask specifies the fields to update in the data exchange
   * resource. The fields specified in the
   * `updateMask` are relative to the resource and are not a full request.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    // Never returns null: absent field yields the FieldMask default instance.
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * Required. Field mask specifies the fields to update in the data exchange
   * resource. The fields specified in the
   * `updateMask` are relative to the resource and are not a full request.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }

  public static final int DATA_EXCHANGE_FIELD_NUMBER = 2;
  private com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange dataExchange_;
  /**
   *
   *
   * <pre>
   * Required. The data exchange to update.
   * </pre>
   *
   * <code>
   * .google.cloud.bigquery.dataexchange.v1beta1.DataExchange data_exchange = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the dataExchange field is set.
   */
  @java.lang.Override
  public boolean hasDataExchange() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. The data exchange to update.
   * </pre>
   *
   * <code>
   * .google.cloud.bigquery.dataexchange.v1beta1.DataExchange data_exchange = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The dataExchange.
   */
  @java.lang.Override
  public com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange getDataExchange() {
    return dataExchange_ == null
        ? com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange.getDefaultInstance()
        : dataExchange_;
  }
  /**
   *
   *
   * <pre>
   * Required. The data exchange to update.
   * </pre>
   *
   * <code>
   * .google.cloud.bigquery.dataexchange.v1beta1.DataExchange data_exchange = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeOrBuilder
      getDataExchangeOrBuilder() {
    return dataExchange_ == null
        ? com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange.getDefaultInstance()
        : dataExchange_;
  }

  // Memoized tri-state: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    // This message has no fields requiring initialization, so this is always true
    // once memoized.
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Each submessage is written only when its presence bit is set.
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getDataExchange());
    }
    getUnknownFields().writeTo(output);
  }

  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize caches the computed wire size; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getDataExchange());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }

  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest other =
        (com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest) obj;

    // Fields compare equal only when presence AND value match.
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (hasDataExchange() != other.hasDataExchange()) return false;
    if (hasDataExchange()) {
      if (!getDataExchange().equals(other.getDataExchange())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    if (hasDataExchange()) {
      hash = (37 * hash) + DATA_EXCHANGE_FIELD_NUMBER;
      hash = (53 * hash) + getDataExchange().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }

  // Standard generated parse entry points; all delegate to PARSER below.
  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest parseFrom(
      byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoids a needless merge when converting the default instance itself.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }

  /**
   *
   *
   * <pre>
   * Message for updating a data exchange.
   * </pre>
   *
   * Protobuf type {@code google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest)
      com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
          .internal_static_google_cloud_bigquery_dataexchange_v1beta1_UpdateDataExchangeRequest_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
          .internal_static_google_cloud_bigquery_dataexchange_v1beta1_UpdateDataExchangeRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest.class,
              com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest.Builder
                  .class);
    }

    // Construct using
    // com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }

    private void maybeForceBuilderInitialization() {
      // Eagerly creates nested field builders when the runtime requests it
      // (alwaysUseFieldBuilders), so change notifications propagate to parents.
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getUpdateMaskFieldBuilder();
        getDataExchangeFieldBuilder();
      }
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      dataExchange_ = null;
      if (dataExchangeBuilder_ != null) {
        dataExchangeBuilder_.dispose();
        dataExchangeBuilder_ = null;
      }
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto
          .internal_static_google_cloud_bigquery_dataexchange_v1beta1_UpdateDataExchangeRequest_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest
        getDefaultInstanceForType() {
      return com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest
          .getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest build() {
      com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest result =
          buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest buildPartial() {
      com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest result =
          new com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies set fields (and their presence bits) from the builder into the result message.
    private void buildPartial0(
        com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest result) {
      int from_bitField0_ = bitField0_;
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.dataExchange_ =
            dataExchangeBuilder_ == null ? dataExchange_ : dataExchangeBuilder_.build();
        to_bitField0_ |= 0x00000002;
      }
      result.bitField0_ |= to_bitField0_;
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other
          instanceof com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest) {
        return mergeFrom(
            (com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest other) {
      // Merging the default instance is a no-op.
      if (other
          == com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest
              .getDefaultInstance()) return this;
      if (other.hasUpdateMask()) {
        mergeUpdateMask(other.getUpdateMask());
      }
      if (other.hasDataExchange()) {
        mergeDataExchange(other.getDataExchange());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                // Tag 10 = field 1 (update_mask), wire type LENGTH_DELIMITED.
                input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                // Tag 18 = field 2 (data_exchange), wire type LENGTH_DELIMITED.
                input.readMessage(getDataExchangeFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    private com.google.protobuf.FieldMask updateMask_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        updateMaskBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Field mask specifies the fields to update in the data exchange
     * resource. The fields specified in the
     * `updateMask` are relative to the resource and are not a full request.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the updateMask field is set.
     */
    public boolean hasUpdateMask() {
      return ((bitField0_ & 0x00000001) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask specifies the fields to update in the data exchange
     * resource. The fields specified in the
     * `updateMask` are relative to the resource and are not a full request.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The updateMask.
     */
    public com.google.protobuf.FieldMask getUpdateMask() {
      if (updateMaskBuilder_ == null) {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      } else {
        return updateMaskBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask specifies the fields to update in the data exchange
     * resource. The fields specified in the
     * `updateMask` are relative to the resource and are not a full request.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        updateMask_ = value;
      } else {
        updateMaskBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask specifies the fields to update in the data exchange
     * resource. The fields specified in the
     * `updateMask` are relative to the resource and are not a full request.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
      if (updateMaskBuilder_ == null) {
        updateMask_ = builderForValue.build();
      } else {
        updateMaskBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask specifies the fields to update in the data exchange
     * resource. The fields specified in the
     * `updateMask` are relative to the resource and are not a full request.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
      if (updateMaskBuilder_ == null) {
        // Merge into an existing non-default value; otherwise just adopt the new value.
        if (((bitField0_ & 0x00000001) != 0)
            && updateMask_ != null
            && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
          getUpdateMaskBuilder().mergeFrom(value);
        } else {
          updateMask_ = value;
        }
      } else {
        updateMaskBuilder_.mergeFrom(value);
      }
      if (updateMask_ != null) {
        bitField0_ |= 0x00000001;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask specifies the fields to update in the data exchange
     * resource. The fields specified in the
     * `updateMask` are relative to the resource and are not a full request.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearUpdateMask() {
      bitField0_ = (bitField0_ & ~0x00000001);
      updateMask_ = null;
      if (updateMaskBuilder_ != null) {
        updateMaskBuilder_.dispose();
        updateMaskBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask specifies the fields to update in the data exchange
     * resource. The fields specified in the
     * `updateMask` are relative to the resource and are not a full request.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
      bitField0_ |= 0x00000001;
      onChanged();
      return getUpdateMaskFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask specifies the fields to update in the data exchange
     * resource. The fields specified in the
     * `updateMask` are relative to the resource and are not a full request.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
      if (updateMaskBuilder_ != null) {
        return updateMaskBuilder_.getMessageOrBuilder();
      } else {
        return updateMask_ == null
            ? com.google.protobuf.FieldMask.getDefaultInstance()
            : updateMask_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Field mask specifies the fields to update in the data exchange
     * resource. The fields specified in the
     * `updateMask` are relative to the resource and are not a full request.
     * </pre>
     *
     * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.FieldMask,
            com.google.protobuf.FieldMask.Builder,
            com.google.protobuf.FieldMaskOrBuilder>
        getUpdateMaskFieldBuilder() {
      // Lazily creates the nested builder; ownership of the value transfers to it.
      if (updateMaskBuilder_ == null) {
        updateMaskBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.protobuf.FieldMask,
                com.google.protobuf.FieldMask.Builder,
                com.google.protobuf.FieldMaskOrBuilder>(
                getUpdateMask(), getParentForChildren(), isClean());
        updateMask_ = null;
      }
      return updateMaskBuilder_;
    }

    private com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange dataExchange_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange,
            com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange.Builder,
            com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeOrBuilder>
        dataExchangeBuilder_;
    /**
     *
     *
     * <pre>
     * Required. The data exchange to update.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.dataexchange.v1beta1.DataExchange data_exchange = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the dataExchange field is set.
     */
    public boolean hasDataExchange() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. The data exchange to update.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.dataexchange.v1beta1.DataExchange data_exchange = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The dataExchange.
     */
    public com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange getDataExchange() {
      if (dataExchangeBuilder_ == null) {
        return dataExchange_ == null
            ? com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange.getDefaultInstance()
            : dataExchange_;
      } else {
        return dataExchangeBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The data exchange to update.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.dataexchange.v1beta1.DataExchange data_exchange = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setDataExchange(
        com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange value) {
      if (dataExchangeBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        dataExchange_ = value;
      } else {
        dataExchangeBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The data exchange to update.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.dataexchange.v1beta1.DataExchange data_exchange = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setDataExchange(
        com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange.Builder builderForValue) {
      if (dataExchangeBuilder_ == null) {
        dataExchange_ = builderForValue.build();
      } else {
        dataExchangeBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The data exchange to update.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.dataexchange.v1beta1.DataExchange data_exchange = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeDataExchange(
        com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange value) {
      if (dataExchangeBuilder_ == null) {
        // Merge into an existing non-default value; otherwise just adopt the new value.
        if (((bitField0_ & 0x00000002) != 0)
            && dataExchange_ != null
            && dataExchange_
                != com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange
                    .getDefaultInstance()) {
          getDataExchangeBuilder().mergeFrom(value);
        } else {
          dataExchange_ = value;
        }
      } else {
        dataExchangeBuilder_.mergeFrom(value);
      }
      if (dataExchange_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The data exchange to update.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.dataexchange.v1beta1.DataExchange data_exchange = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearDataExchange() {
      bitField0_ = (bitField0_ & ~0x00000002);
      dataExchange_ = null;
      if (dataExchangeBuilder_ != null) {
        dataExchangeBuilder_.dispose();
        dataExchangeBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. The data exchange to update.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.dataexchange.v1beta1.DataExchange data_exchange = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange.Builder
        getDataExchangeBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getDataExchangeFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. The data exchange to update.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.dataexchange.v1beta1.DataExchange data_exchange = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeOrBuilder
        getDataExchangeOrBuilder() {
      if (dataExchangeBuilder_ != null) {
        return dataExchangeBuilder_.getMessageOrBuilder();
      } else {
        return dataExchange_ == null
            ? com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange.getDefaultInstance()
            : dataExchange_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. The data exchange to update.
     * </pre>
     *
     * <code>
     * .google.cloud.bigquery.dataexchange.v1beta1.DataExchange data_exchange = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange,
            com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange.Builder,
            com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeOrBuilder>
        getDataExchangeFieldBuilder() {
      // Lazily creates the nested builder; ownership of the value transfers to it.
      if (dataExchangeBuilder_ == null) {
        dataExchangeBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange,
                com.google.cloud.bigquery.dataexchange.v1beta1.DataExchange.Builder,
                com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeOrBuilder>(
                getDataExchange(), getParentForChildren(), isClean());
        dataExchange_ = null;
      }
      return dataExchangeBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest)
  }

  // @@protoc_insertion_point(class_scope:google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest)
  // Process-wide singleton default instance; also the identity used by toBuilder().
  private static final com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest
      DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE =
        new com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest();
  }

  public static com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest
      getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  private static final com.google.protobuf.Parser<UpdateDataExchangeRequest> PARSER =
      new com.google.protobuf.AbstractParser<UpdateDataExchangeRequest>() {
        @java.lang.Override
        public UpdateDataExchangeRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            // Attach the partially-parsed message so callers can inspect what was read.
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<UpdateDataExchangeRequest> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<UpdateDataExchangeRequest> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.bigquery.dataexchange.v1beta1.UpdateDataExchangeRequest
      getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
googleapis/google-cloud-java | 37,175 | java-discoveryengine/google-cloud-discoveryengine/src/main/java/com/google/cloud/discoveryengine/v1beta/stub/HttpJsonSampleQueryServiceStub.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.discoveryengine.v1beta.stub;
import static com.google.cloud.discoveryengine.v1beta.SampleQueryServiceClient.ListSampleQueriesPagedResponse;
import com.google.api.HttpRule;
import com.google.api.core.BetaApi;
import com.google.api.core.InternalApi;
import com.google.api.gax.core.BackgroundResource;
import com.google.api.gax.core.BackgroundResourceAggregation;
import com.google.api.gax.httpjson.ApiMethodDescriptor;
import com.google.api.gax.httpjson.HttpJsonCallSettings;
import com.google.api.gax.httpjson.HttpJsonOperationSnapshot;
import com.google.api.gax.httpjson.HttpJsonStubCallableFactory;
import com.google.api.gax.httpjson.ProtoMessageRequestFormatter;
import com.google.api.gax.httpjson.ProtoMessageResponseParser;
import com.google.api.gax.httpjson.ProtoRestSerializer;
import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallable;
import com.google.api.gax.rpc.RequestParamsBuilder;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.discoveryengine.v1beta.CreateSampleQueryRequest;
import com.google.cloud.discoveryengine.v1beta.DeleteSampleQueryRequest;
import com.google.cloud.discoveryengine.v1beta.GetSampleQueryRequest;
import com.google.cloud.discoveryengine.v1beta.ImportSampleQueriesMetadata;
import com.google.cloud.discoveryengine.v1beta.ImportSampleQueriesRequest;
import com.google.cloud.discoveryengine.v1beta.ImportSampleQueriesResponse;
import com.google.cloud.discoveryengine.v1beta.ListSampleQueriesRequest;
import com.google.cloud.discoveryengine.v1beta.ListSampleQueriesResponse;
import com.google.cloud.discoveryengine.v1beta.SampleQuery;
import com.google.cloud.discoveryengine.v1beta.UpdateSampleQueryRequest;
import com.google.common.collect.ImmutableMap;
import com.google.longrunning.Operation;
import com.google.protobuf.Empty;
import com.google.protobuf.TypeRegistry;
import java.io.IOException;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* REST stub implementation for the SampleQueryService service API.
*
* <p>This class is for advanced usage and reflects the underlying API directly.
*/
@BetaApi
@Generated("by gapic-generator-java")
public class HttpJsonSampleQueryServiceStub extends SampleQueryServiceStub {
  // Registry of proto message types that may appear packed inside Any fields of
  // long-running Operation payloads (the import LRO's metadata and response).
  private static final TypeRegistry typeRegistry =
      TypeRegistry.newBuilder()
          .add(ImportSampleQueriesMetadata.getDescriptor())
          .add(ImportSampleQueriesResponse.getDescriptor())
          .build();
  // REST descriptor for GetSampleQuery: unary GET on the sample-query resource name.
  private static final ApiMethodDescriptor<GetSampleQueryRequest, SampleQuery>
      getSampleQueryMethodDescriptor =
          ApiMethodDescriptor.<GetSampleQueryRequest, SampleQuery>newBuilder()
              .setFullMethodName(
                  "google.cloud.discoveryengine.v1beta.SampleQueryService/GetSampleQuery")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<GetSampleQueryRequest>newBuilder()
                      .setPath(
                          "/v1beta/{name=projects/*/locations/*/sampleQuerySets/*/sampleQueries/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<GetSampleQueryRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<GetSampleQueryRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<SampleQuery>newBuilder()
                      .setDefaultInstance(SampleQuery.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // REST descriptor for ListSampleQueries: unary GET on the parent collection,
  // with pageSize/pageToken forwarded as query parameters.
  private static final ApiMethodDescriptor<ListSampleQueriesRequest, ListSampleQueriesResponse>
      listSampleQueriesMethodDescriptor =
          ApiMethodDescriptor.<ListSampleQueriesRequest, ListSampleQueriesResponse>newBuilder()
              .setFullMethodName(
                  "google.cloud.discoveryengine.v1beta.SampleQueryService/ListSampleQueries")
              .setHttpMethod("GET")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ListSampleQueriesRequest>newBuilder()
                      .setPath(
                          "/v1beta/{parent=projects/*/locations/*/sampleQuerySets/*}/sampleQueries",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ListSampleQueriesRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ListSampleQueriesRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "pageSize", request.getPageSize());
                            serializer.putQueryParam(fields, "pageToken", request.getPageToken());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<ListSampleQueriesResponse>newBuilder()
                      .setDefaultInstance(ListSampleQueriesResponse.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // REST descriptor for CreateSampleQuery: POST with the SampleQuery in the body
  // and the caller-chosen sampleQueryId as a query parameter.
  private static final ApiMethodDescriptor<CreateSampleQueryRequest, SampleQuery>
      createSampleQueryMethodDescriptor =
          ApiMethodDescriptor.<CreateSampleQueryRequest, SampleQuery>newBuilder()
              .setFullMethodName(
                  "google.cloud.discoveryengine.v1beta.SampleQueryService/CreateSampleQuery")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<CreateSampleQueryRequest>newBuilder()
                      .setPath(
                          "/v1beta/{parent=projects/*/locations/*/sampleQuerySets/*}/sampleQueries",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<CreateSampleQueryRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<CreateSampleQueryRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(
                                fields, "sampleQueryId", request.getSampleQueryId());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("sampleQuery", request.getSampleQuery(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<SampleQuery>newBuilder()
                      .setDefaultInstance(SampleQuery.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // REST descriptor for UpdateSampleQuery: PATCH keyed by the nested resource name,
  // with updateMask forwarded as a query parameter.
  private static final ApiMethodDescriptor<UpdateSampleQueryRequest, SampleQuery>
      updateSampleQueryMethodDescriptor =
          ApiMethodDescriptor.<UpdateSampleQueryRequest, SampleQuery>newBuilder()
              .setFullMethodName(
                  "google.cloud.discoveryengine.v1beta.SampleQueryService/UpdateSampleQuery")
              .setHttpMethod("PATCH")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<UpdateSampleQueryRequest>newBuilder()
                      .setPath(
                          "/v1beta/{sampleQuery.name=projects/*/locations/*/sampleQuerySets/*/sampleQueries/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateSampleQueryRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(
                                fields, "sampleQuery.name", request.getSampleQuery().getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<UpdateSampleQueryRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "updateMask", request.getUpdateMask());
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("sampleQuery", request.getSampleQuery(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<SampleQuery>newBuilder()
                      .setDefaultInstance(SampleQuery.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // REST descriptor for DeleteSampleQuery: DELETE on the resource name, empty response.
  private static final ApiMethodDescriptor<DeleteSampleQueryRequest, Empty>
      deleteSampleQueryMethodDescriptor =
          ApiMethodDescriptor.<DeleteSampleQueryRequest, Empty>newBuilder()
              .setFullMethodName(
                  "google.cloud.discoveryengine.v1beta.SampleQueryService/DeleteSampleQuery")
              .setHttpMethod("DELETE")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<DeleteSampleQueryRequest>newBuilder()
                      .setPath(
                          "/v1beta/{name=projects/*/locations/*/sampleQuerySets/*/sampleQueries/*}",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteSampleQueryRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "name", request.getName());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<DeleteSampleQueryRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(request -> null)
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Empty>newBuilder()
                      .setDefaultInstance(Empty.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .build();
  // REST descriptor for ImportSampleQueries: long-running POST on the :import custom verb.
  // The body is the whole request minus the parent (already carried in the path); the
  // returned Operation is adapted into an operation snapshot for LRO polling.
  private static final ApiMethodDescriptor<ImportSampleQueriesRequest, Operation>
      importSampleQueriesMethodDescriptor =
          ApiMethodDescriptor.<ImportSampleQueriesRequest, Operation>newBuilder()
              .setFullMethodName(
                  "google.cloud.discoveryengine.v1beta.SampleQueryService/ImportSampleQueries")
              .setHttpMethod("POST")
              .setType(ApiMethodDescriptor.MethodType.UNARY)
              .setRequestFormatter(
                  ProtoMessageRequestFormatter.<ImportSampleQueriesRequest>newBuilder()
                      .setPath(
                          "/v1beta/{parent=projects/*/locations/*/sampleQuerySets/*}/sampleQueries:import",
                          request -> {
                            Map<String, String> fields = new HashMap<>();
                            ProtoRestSerializer<ImportSampleQueriesRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putPathParam(fields, "parent", request.getParent());
                            return fields;
                          })
                      .setQueryParamsExtractor(
                          request -> {
                            Map<String, List<String>> fields = new HashMap<>();
                            ProtoRestSerializer<ImportSampleQueriesRequest> serializer =
                                ProtoRestSerializer.create();
                            serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int");
                            return fields;
                          })
                      .setRequestBodyExtractor(
                          request ->
                              ProtoRestSerializer.create()
                                  .toBody("*", request.toBuilder().clearParent().build(), true))
                      .build())
              .setResponseParser(
                  ProtoMessageResponseParser.<Operation>newBuilder()
                      .setDefaultInstance(Operation.getDefaultInstance())
                      .setDefaultTypeRegistry(typeRegistry)
                      .build())
              .setOperationSnapshotFactory(
                  (ImportSampleQueriesRequest request, Operation response) ->
                      HttpJsonOperationSnapshot.create(response))
              .build();
  // Per-RPC callables; all of them are wired up in the constructor from the
  // transport settings plus the retry/timeout settings supplied by the caller.
  private final UnaryCallable<GetSampleQueryRequest, SampleQuery> getSampleQueryCallable;
  private final UnaryCallable<ListSampleQueriesRequest, ListSampleQueriesResponse>
      listSampleQueriesCallable;
  private final UnaryCallable<ListSampleQueriesRequest, ListSampleQueriesPagedResponse>
      listSampleQueriesPagedCallable;
  private final UnaryCallable<CreateSampleQueryRequest, SampleQuery> createSampleQueryCallable;
  private final UnaryCallable<UpdateSampleQueryRequest, SampleQuery> updateSampleQueryCallable;
  private final UnaryCallable<DeleteSampleQueryRequest, Empty> deleteSampleQueryCallable;
  private final UnaryCallable<ImportSampleQueriesRequest, Operation> importSampleQueriesCallable;
  private final OperationCallable<
          ImportSampleQueriesRequest, ImportSampleQueriesResponse, ImportSampleQueriesMetadata>
      importSampleQueriesOperationCallable;
  private final BackgroundResource backgroundResources;
  private final HttpJsonOperationsStub httpJsonOperationsStub;
  private final HttpJsonStubCallableFactory callableFactory;
  // Static factory methods; these are the preferred way to obtain an instance.
  public static final HttpJsonSampleQueryServiceStub create(SampleQueryServiceStubSettings settings)
      throws IOException {
    return new HttpJsonSampleQueryServiceStub(settings, ClientContext.create(settings));
  }
  public static final HttpJsonSampleQueryServiceStub create(ClientContext clientContext)
      throws IOException {
    return new HttpJsonSampleQueryServiceStub(
        SampleQueryServiceStubSettings.newHttpJsonBuilder().build(), clientContext);
  }
  public static final HttpJsonSampleQueryServiceStub create(
      ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException {
    return new HttpJsonSampleQueryServiceStub(
        SampleQueryServiceStubSettings.newHttpJsonBuilder().build(),
        clientContext,
        callableFactory);
  }
  /**
   * Constructs an instance of HttpJsonSampleQueryServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonSampleQueryServiceStub(
      SampleQueryServiceStubSettings settings, ClientContext clientContext) throws IOException {
    this(settings, clientContext, new HttpJsonSampleQueryServiceCallableFactory());
  }
  /**
   * Constructs an instance of HttpJsonSampleQueryServiceStub, using the given settings. This is
   * protected so that it is easy to make a subclass, but otherwise, the static factory methods
   * should be preferred.
   */
  protected HttpJsonSampleQueryServiceStub(
      SampleQueryServiceStubSettings settings,
      ClientContext clientContext,
      HttpJsonStubCallableFactory callableFactory)
      throws IOException {
    this.callableFactory = callableFactory;
    // Wire up the Operations mixin stub with the REST bindings used to cancel,
    // get, and list long-running operations for this service's resource hierarchy.
    this.httpJsonOperationsStub =
        HttpJsonOperationsStub.create(
            clientContext,
            callableFactory,
            typeRegistry,
            ImmutableMap.<String, HttpRule>builder()
                .put(
                    "google.longrunning.Operations.CancelOperation",
                    HttpRule.newBuilder()
                        .setPost(
                            "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}:cancel")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setPost(
                                    "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}:cancel")
                                .build())
                        .build())
                .put(
                    "google.longrunning.Operations.GetOperation",
                    HttpRule.newBuilder()
                        .setGet(
                            "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector/operations/*}")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/engines/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/dataStores/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/evaluations/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1beta/{name=projects/*/locations/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/sampleQuerySets/*/operations/*}")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1beta/{name=projects/*/operations/*}")
                                .build())
                        .build())
                .put(
                    "google.longrunning.Operations.ListOperations",
                    HttpRule.newBuilder()
                        .setGet(
                            "/v1beta/{name=projects/*/locations/*/collections/*/dataConnector}/operations")
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/branches/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/models/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/schemas/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine/targetSites}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*/siteSearchEngine}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/dataStores/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*/engines/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/collections/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/dataStores/*/branches/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/dataStores/*/models/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet(
                                    "/v1beta/{name=projects/*/locations/*/dataStores/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1beta/{name=projects/*/locations/*}/operations")
                                .build())
                        .addAdditionalBindings(
                            HttpRule.newBuilder()
                                .setGet("/v1beta/{name=projects/*}/operations")
                                .build())
                        .build())
                .build());
    // Transport settings pair each method descriptor with a params extractor that
    // populates routing headers (e.g. "name", "parent") from the request.
    HttpJsonCallSettings<GetSampleQueryRequest, SampleQuery> getSampleQueryTransportSettings =
        HttpJsonCallSettings.<GetSampleQueryRequest, SampleQuery>newBuilder()
            .setMethodDescriptor(getSampleQueryMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ListSampleQueriesRequest, ListSampleQueriesResponse>
        listSampleQueriesTransportSettings =
            HttpJsonCallSettings.<ListSampleQueriesRequest, ListSampleQueriesResponse>newBuilder()
                .setMethodDescriptor(listSampleQueriesMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    HttpJsonCallSettings<CreateSampleQueryRequest, SampleQuery> createSampleQueryTransportSettings =
        HttpJsonCallSettings.<CreateSampleQueryRequest, SampleQuery>newBuilder()
            .setMethodDescriptor(createSampleQueryMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("parent", String.valueOf(request.getParent()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<UpdateSampleQueryRequest, SampleQuery> updateSampleQueryTransportSettings =
        HttpJsonCallSettings.<UpdateSampleQueryRequest, SampleQuery>newBuilder()
            .setMethodDescriptor(updateSampleQueryMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add(
                      "sample_query.name", String.valueOf(request.getSampleQuery().getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<DeleteSampleQueryRequest, Empty> deleteSampleQueryTransportSettings =
        HttpJsonCallSettings.<DeleteSampleQueryRequest, Empty>newBuilder()
            .setMethodDescriptor(deleteSampleQueryMethodDescriptor)
            .setTypeRegistry(typeRegistry)
            .setParamsExtractor(
                request -> {
                  RequestParamsBuilder builder = RequestParamsBuilder.create();
                  builder.add("name", String.valueOf(request.getName()));
                  return builder.build();
                })
            .build();
    HttpJsonCallSettings<ImportSampleQueriesRequest, Operation>
        importSampleQueriesTransportSettings =
            HttpJsonCallSettings.<ImportSampleQueriesRequest, Operation>newBuilder()
                .setMethodDescriptor(importSampleQueriesMethodDescriptor)
                .setTypeRegistry(typeRegistry)
                .setParamsExtractor(
                    request -> {
                      RequestParamsBuilder builder = RequestParamsBuilder.create();
                      builder.add("parent", String.valueOf(request.getParent()));
                      return builder.build();
                    })
                .build();
    // Assemble the final callables (adds retries, timeouts, and paging behavior).
    this.getSampleQueryCallable =
        callableFactory.createUnaryCallable(
            getSampleQueryTransportSettings, settings.getSampleQuerySettings(), clientContext);
    this.listSampleQueriesCallable =
        callableFactory.createUnaryCallable(
            listSampleQueriesTransportSettings,
            settings.listSampleQueriesSettings(),
            clientContext);
    this.listSampleQueriesPagedCallable =
        callableFactory.createPagedCallable(
            listSampleQueriesTransportSettings,
            settings.listSampleQueriesSettings(),
            clientContext);
    this.createSampleQueryCallable =
        callableFactory.createUnaryCallable(
            createSampleQueryTransportSettings,
            settings.createSampleQuerySettings(),
            clientContext);
    this.updateSampleQueryCallable =
        callableFactory.createUnaryCallable(
            updateSampleQueryTransportSettings,
            settings.updateSampleQuerySettings(),
            clientContext);
    this.deleteSampleQueryCallable =
        callableFactory.createUnaryCallable(
            deleteSampleQueryTransportSettings,
            settings.deleteSampleQuerySettings(),
            clientContext);
    this.importSampleQueriesCallable =
        callableFactory.createUnaryCallable(
            importSampleQueriesTransportSettings,
            settings.importSampleQueriesSettings(),
            clientContext);
    this.importSampleQueriesOperationCallable =
        callableFactory.createOperationCallable(
            importSampleQueriesTransportSettings,
            settings.importSampleQueriesOperationSettings(),
            clientContext,
            httpJsonOperationsStub);
    this.backgroundResources =
        new BackgroundResourceAggregation(clientContext.getBackgroundResources());
  }
  /** Returns the descriptors of all REST methods exposed by this stub. */
  @InternalApi
  public static List<ApiMethodDescriptor> getMethodDescriptors() {
    List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>();
    methodDescriptors.add(getSampleQueryMethodDescriptor);
    methodDescriptors.add(listSampleQueriesMethodDescriptor);
    methodDescriptors.add(createSampleQueryMethodDescriptor);
    methodDescriptors.add(updateSampleQueryMethodDescriptor);
    methodDescriptors.add(deleteSampleQueryMethodDescriptor);
    methodDescriptors.add(importSampleQueriesMethodDescriptor);
    return methodDescriptors;
  }
  public HttpJsonOperationsStub getHttpJsonOperationsStub() {
    return httpJsonOperationsStub;
  }
  @Override
  public UnaryCallable<GetSampleQueryRequest, SampleQuery> getSampleQueryCallable() {
    return getSampleQueryCallable;
  }
  @Override
  public UnaryCallable<ListSampleQueriesRequest, ListSampleQueriesResponse>
      listSampleQueriesCallable() {
    return listSampleQueriesCallable;
  }
  @Override
  public UnaryCallable<ListSampleQueriesRequest, ListSampleQueriesPagedResponse>
      listSampleQueriesPagedCallable() {
    return listSampleQueriesPagedCallable;
  }
  @Override
  public UnaryCallable<CreateSampleQueryRequest, SampleQuery> createSampleQueryCallable() {
    return createSampleQueryCallable;
  }
  @Override
  public UnaryCallable<UpdateSampleQueryRequest, SampleQuery> updateSampleQueryCallable() {
    return updateSampleQueryCallable;
  }
  @Override
  public UnaryCallable<DeleteSampleQueryRequest, Empty> deleteSampleQueryCallable() {
    return deleteSampleQueryCallable;
  }
  @Override
  public UnaryCallable<ImportSampleQueriesRequest, Operation> importSampleQueriesCallable() {
    return importSampleQueriesCallable;
  }
  @Override
  public OperationCallable<
          ImportSampleQueriesRequest, ImportSampleQueriesResponse, ImportSampleQueriesMetadata>
      importSampleQueriesOperationCallable() {
    return importSampleQueriesOperationCallable;
  }
  @Override
  public final void close() {
    try {
      backgroundResources.close();
    } catch (RuntimeException e) {
      // Runtime failures propagate unchanged; checked failures are wrapped below.
      throw e;
    } catch (Exception e) {
      throw new IllegalStateException("Failed to close resource", e);
    }
  }
  @Override
  public void shutdown() {
    backgroundResources.shutdown();
  }
  @Override
  public boolean isShutdown() {
    return backgroundResources.isShutdown();
  }
  @Override
  public boolean isTerminated() {
    return backgroundResources.isTerminated();
  }
  @Override
  public void shutdownNow() {
    backgroundResources.shutdownNow();
  }
  @Override
  public boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException {
    return backgroundResources.awaitTermination(duration, unit);
  }
}
|
apache/ignite-3 | 37,260 | modules/distribution-zones/src/main/java/org/apache/ignite/internal/distributionzones/rebalance/RebalanceUtil.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.distributionzones.rebalance;
import static java.nio.charset.StandardCharsets.UTF_8;
import static java.util.concurrent.CompletableFuture.allOf;
import static java.util.stream.Collectors.toList;
import static java.util.stream.Collectors.toSet;
import static org.apache.ignite.internal.distributionzones.rebalance.AssignmentUtil.metastoreAssignments;
import static org.apache.ignite.internal.distributionzones.rebalance.AssignmentUtil.partitionIds;
import static org.apache.ignite.internal.distributionzones.rebalance.RebalanceUtil.UpdateStatus.ASSIGNMENT_NOT_UPDATED;
import static org.apache.ignite.internal.distributionzones.rebalance.RebalanceUtil.UpdateStatus.OUTDATED_UPDATE_RECEIVED;
import static org.apache.ignite.internal.distributionzones.rebalance.RebalanceUtil.UpdateStatus.PENDING_KEY_UPDATED;
import static org.apache.ignite.internal.distributionzones.rebalance.RebalanceUtil.UpdateStatus.PLANNED_KEY_REMOVED_EMPTY_PENDING;
import static org.apache.ignite.internal.distributionzones.rebalance.RebalanceUtil.UpdateStatus.PLANNED_KEY_REMOVED_EQUALS_PENDING;
import static org.apache.ignite.internal.distributionzones.rebalance.RebalanceUtil.UpdateStatus.PLANNED_KEY_UPDATED;
import static org.apache.ignite.internal.metastorage.dsl.Conditions.and;
import static org.apache.ignite.internal.metastorage.dsl.Conditions.exists;
import static org.apache.ignite.internal.metastorage.dsl.Conditions.notExists;
import static org.apache.ignite.internal.metastorage.dsl.Conditions.or;
import static org.apache.ignite.internal.metastorage.dsl.Conditions.value;
import static org.apache.ignite.internal.metastorage.dsl.Operations.ops;
import static org.apache.ignite.internal.metastorage.dsl.Operations.put;
import static org.apache.ignite.internal.metastorage.dsl.Operations.remove;
import static org.apache.ignite.internal.metastorage.dsl.Statements.iif;
import static org.apache.ignite.internal.partitiondistribution.PartitionDistributionUtils.calculateAssignmentForPartition;
import static org.apache.ignite.internal.partitiondistribution.PendingAssignmentsCalculator.pendingAssignmentsCalculator;
import static org.apache.ignite.internal.util.ByteUtils.longToBytesKeepingOrder;
import static org.apache.ignite.internal.util.CompletableFutures.nullCompletedFuture;
import static org.apache.ignite.internal.util.StringUtils.toStringWithoutPrefix;
import java.util.Collection;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.concurrent.CompletableFuture;
import java.util.concurrent.ConcurrentHashMap;
import java.util.stream.IntStream;
import org.apache.ignite.internal.catalog.descriptors.CatalogTableDescriptor;
import org.apache.ignite.internal.catalog.descriptors.CatalogZoneDescriptor;
import org.apache.ignite.internal.catalog.descriptors.ConsistencyMode;
import org.apache.ignite.internal.hlc.HybridTimestamp;
import org.apache.ignite.internal.lang.ByteArray;
import org.apache.ignite.internal.logger.IgniteLogger;
import org.apache.ignite.internal.logger.Loggers;
import org.apache.ignite.internal.metastorage.Entry;
import org.apache.ignite.internal.metastorage.MetaStorageManager;
import org.apache.ignite.internal.metastorage.dsl.Condition;
import org.apache.ignite.internal.metastorage.dsl.Iif;
import org.apache.ignite.internal.partitiondistribution.Assignment;
import org.apache.ignite.internal.partitiondistribution.Assignments;
import org.apache.ignite.internal.partitiondistribution.AssignmentsChain;
import org.apache.ignite.internal.partitiondistribution.AssignmentsQueue;
import org.apache.ignite.internal.replicator.TablePartitionId;
import org.apache.ignite.internal.util.ExceptionUtils;
import org.jetbrains.annotations.Nullable;
import org.jetbrains.annotations.TestOnly;
/**
* Util class for methods needed for the rebalance process.
* TODO: https://issues.apache.org/jira/browse/IGNITE-22522 remove this class and use {@link ZoneRebalanceUtil} instead
* after switching to zone-based replication.
*/
public class RebalanceUtil {
/** Logger. */
private static final IgniteLogger LOG = Loggers.forClass(RebalanceUtil.class);
/** Key prefix for planned assignments. */
public static final String PLANNED_ASSIGNMENTS_PREFIX = "assignments.planned.";
/** Key prefix for pending assignments. */
public static final String PENDING_ASSIGNMENTS_QUEUE_PREFIX = "assignments.pending.";
public static final byte[] PENDING_ASSIGNMENTS_QUEUE_PREFIX_BYTES = "assignments.pending.".getBytes(UTF_8);
/** Key prefix for stable assignments. */
public static final String STABLE_ASSIGNMENTS_PREFIX = "assignments.stable.";
public static final byte[] STABLE_ASSIGNMENTS_PREFIX_BYTES = STABLE_ASSIGNMENTS_PREFIX.getBytes(UTF_8);
/** Key prefix for switch reduce assignments. */
public static final String ASSIGNMENTS_SWITCH_REDUCE_PREFIX = "assignments.switch.reduce.";
public static final byte[] ASSIGNMENTS_SWITCH_REDUCE_PREFIX_BYTES = ASSIGNMENTS_SWITCH_REDUCE_PREFIX.getBytes(UTF_8);
/** Key prefix for switch append assignments. */
public static final String ASSIGNMENTS_SWITCH_APPEND_PREFIX = "assignments.switch.append.";
/** Key prefix for change trigger keys. */
public static final String PENDING_CHANGE_TRIGGER_PREFIX = "pending.change.trigger.";
static final byte[] PENDING_CHANGE_TRIGGER_PREFIX_BYTES = PENDING_CHANGE_TRIGGER_PREFIX.getBytes(UTF_8);
private static final String ASSIGNMENTS_CHAIN_PREFIX = "assignments.chain.";
/**
* Status values for methods like {@link #updatePendingAssignmentsKeys}.
*/
public enum UpdateStatus {
/**
* Return code of metastore multi-invoke which identifies,
* that pending key was updated to new value (i.e. there is no active rebalance at the moment of call).
*/
PENDING_KEY_UPDATED,
/**
* Return code of metastore multi-invoke which identifies,
* that planned key was updated to new value (i.e. there is an active rebalance at the moment of call).
*/
PLANNED_KEY_UPDATED,
/**
* Return code of metastore multi-invoke which identifies,
* that planned key was removed, because current rebalance is already have the same target.
*/
PLANNED_KEY_REMOVED_EQUALS_PENDING,
/**
* Return code of metastore multi-invoke which identifies,
* that planned key was removed, because current assignment is empty.
*/
PLANNED_KEY_REMOVED_EMPTY_PENDING,
/**
* Return code of metastore multi-invoke which identifies,
* that assignments do not need to be updated.
*/
ASSIGNMENT_NOT_UPDATED,
/**
* Return code of metastore multi-invoke which identifies,
* that this trigger event was already processed by another node and must be skipped.
*/
OUTDATED_UPDATE_RECEIVED;
private static final UpdateStatus[] VALUES = values();
public static UpdateStatus valueOf(int ordinal) {
return VALUES[ordinal];
}
}
    /**
     * Update keys that related to rebalance algorithm in Meta Storage. Keys are specific for partition.
     *
     * @param tableDescriptor Table descriptor.
     * @param partId Unique identifier of a partition.
     * @param dataNodes Data nodes.
     * @param partitions Number of partitions.
     * @param replicas Number of replicas for a table.
     * @param consensusGroupSize Number of nodes in a consensus group.
     * @param revision Revision of Meta Storage that is specific for the assignment update; only used for logging here.
     * @param timestamp Timestamp of the triggering event; used to skip stale (out-of-order) updates via the change-trigger key.
     * @param metaStorageMgr Meta Storage manager.
     * @param partNum Partition id.
     * @param tableCfgPartAssignments Table configuration assignments.
     * @param assignmentsTimestamp Timestamp the produced {@code Assignments} are stamped with.
     * @param aliveNodes Nodes currently considered alive; consulted only in {@code HIGH_AVAILABILITY} consistency mode.
     * @param consistencyMode Consistency mode of the zone the partition belongs to.
     * @return Future representing result of updating keys in {@code metaStorageMgr}
     */
    public static CompletableFuture<Void> updatePendingAssignmentsKeys(
            CatalogTableDescriptor tableDescriptor,
            TablePartitionId partId,
            Collection<String> dataNodes,
            int partitions,
            int replicas,
            int consensusGroupSize,
            long revision,
            HybridTimestamp timestamp,
            MetaStorageManager metaStorageMgr,
            int partNum,
            Set<Assignment> tableCfgPartAssignments,
            long assignmentsTimestamp,
            Set<String> aliveNodes,
            ConsistencyMode consistencyMode
    ) {
        ByteArray partChangeTriggerKey = pendingChangeTriggerKey(partId);
        ByteArray partAssignmentsPendingKey = pendingPartAssignmentsQueueKey(partId);
        ByteArray partAssignmentsPlannedKey = plannedPartAssignmentsKey(partId);
        ByteArray partAssignmentsStableKey = stablePartAssignmentsKey(partId);
        Set<Assignment> calculatedAssignments = calculateAssignmentForPartition(
                dataNodes,
                partNum,
                partitions,
                replicas,
                consensusGroupSize
        );
        Set<Assignment> targetAssignmentSet;
        if (consistencyMode == ConsistencyMode.HIGH_AVAILABILITY) {
            // All complicated logic here is needed because we want to return back to stable nodes
            // that are returned back after majority is lost and stable was narrowed.
            // Let's consider example:
            // scale down big enough (for example, infinite)
            // stable = [A, B, C], dataNodes = [A, B, C]
            // B, C left, stable = [A] due to partition reset, dataNodes = [A, B, C]
            // B returned, we want stable = [A, B], but in terms of data nodes they are not changed and equal [A, B, C]
            // So, because scale up mechanism in this case won't adjust stable, we need to add B to stable manually.
            // General idea is to filter offline nodes from data nodes, but we need to be careful and do not remove nodes
            // bypassing scale down mechanism. If node is offline and presented in previous stable, we won't remove that node.
            // First of all, we remove offline nodes from calculated assignments
            Set<Assignment> resultingAssignments = calculatedAssignments
                    .stream()
                    .filter(a -> aliveNodes.contains(a.consistentId()))
                    .collect(toSet());
            // Here we re-introduce nodes that currently exist in the stable configuration
            // but were previously removed without using the normal scale-down process.
            for (Assignment assignment : tableCfgPartAssignments) {
                if (calculatedAssignments.contains(assignment)) {
                    resultingAssignments.add(assignment);
                }
            }
            targetAssignmentSet = resultingAssignments;
        } else {
            targetAssignmentSet = calculatedAssignments;
        }
        boolean isNewAssignments = !tableCfgPartAssignments.equals(targetAssignmentSet);
        Assignments targetAssignments = Assignments.of(targetAssignmentSet, assignmentsTimestamp);
        AssignmentsQueue partAssignmentsPendingQueue = pendingAssignmentsCalculator()
                .stable(Assignments.of(tableCfgPartAssignments, assignmentsTimestamp))
                .target(targetAssignments)
                .toQueue();
        byte[] partAssignmentsPlannedBytes = targetAssignments.toBytes();
        byte[] partAssignmentsPendingQueueBytes = partAssignmentsPendingQueue.toBytes();
        // Pseudo-code of the conditional multi-invoke executed below; the yields map to UpdateStatus ordinals.
        // if empty(partition.change.trigger) || partition.change.trigger < event.timestamp:
        //     if empty(partition.assignments.pending)
        //              && ((isNewAssignments && empty(partition.assignments.stable))
        //                  || (partition.assignments.stable != calcPartAssignments() && !empty(partition.assignments.stable))):
        //         partition.assignments.pending = partAssignmentsPendingQueue
        //         partition.change.trigger = event.timestamp
        //     else:
        //         if partition.assignments.pending != partAssignmentsPendingQueue && !empty(partition.assignments.pending)
        //             partition.assignments.planned = calcPartAssignments()
        //             partition.change.trigger = event.timestamp
        //         else if partition.assignments.pending == partAssignmentsPendingQueue
        //             remove(partition.assignments.planned)
        //             partition.change.trigger = event.timestamp
        //             message after the metastorage invoke:
        //             "Remove planned key because current pending key has the same value."
        //         else if empty(partition.assignments.pending)
        //             remove(partition.assignments.planned)
        //             partition.change.trigger = event.timestamp
        //             message after the metastorage invoke:
        //             "Remove planned key because pending is empty and calculated assignments are equal to current assignments."
        // else:
        //     skip
        Condition newAssignmentsCondition = exists(partAssignmentsStableKey)
                .and(value(partAssignmentsStableKey).ne(partAssignmentsPlannedBytes));
        if (isNewAssignments) {
            newAssignmentsCondition = notExists(partAssignmentsStableKey).or(newAssignmentsCondition);
        }
        byte[] timestampBytes = longToBytesKeepingOrder(timestamp.longValue());
        Iif iif = iif(or(notExists(partChangeTriggerKey), value(partChangeTriggerKey).lt(timestampBytes)),
                iif(and(notExists(partAssignmentsPendingKey), newAssignmentsCondition),
                        ops(
                                put(partAssignmentsPendingKey, partAssignmentsPendingQueueBytes),
                                put(partChangeTriggerKey, timestampBytes)
                        ).yield(PENDING_KEY_UPDATED.ordinal()),
                        iif(and(value(partAssignmentsPendingKey).ne(partAssignmentsPendingQueueBytes), exists(partAssignmentsPendingKey)),
                                ops(
                                        put(partAssignmentsPlannedKey, partAssignmentsPlannedBytes),
                                        put(partChangeTriggerKey, timestampBytes)
                                ).yield(PLANNED_KEY_UPDATED.ordinal()),
                                iif(value(partAssignmentsPendingKey).eq(partAssignmentsPendingQueueBytes),
                                        ops(
                                                remove(partAssignmentsPlannedKey),
                                                put(partChangeTriggerKey, timestampBytes)
                                        ).yield(PLANNED_KEY_REMOVED_EQUALS_PENDING.ordinal()),
                                        iif(notExists(partAssignmentsPendingKey),
                                                ops(
                                                        remove(partAssignmentsPlannedKey),
                                                        put(partChangeTriggerKey, timestampBytes)
                                                ).yield(PLANNED_KEY_REMOVED_EMPTY_PENDING.ordinal()),
                                                ops().yield(ASSIGNMENT_NOT_UPDATED.ordinal()))
                                ))),
                ops().yield(OUTDATED_UPDATE_RECEIVED.ordinal()));
        return metaStorageMgr.invoke(iif).thenAccept(sr -> {
            // Map the yielded ordinal back to a status and log the outcome accordingly.
            switch (UpdateStatus.valueOf(sr.getAsInt())) {
                case PENDING_KEY_UPDATED:
                    LOG.info(
                            "Update metastore pending partitions key [key={}, partition={}, table={}/{}, newVal={}]",
                            partAssignmentsPendingKey.toString(), partNum, tableDescriptor.id(), tableDescriptor.name(),
                            partAssignmentsPendingQueue);
                    break;
                case PLANNED_KEY_UPDATED:
                    LOG.info(
                            "Update metastore planned partitions key [key={}, partition={}, table={}/{}, newVal={}]",
                            partAssignmentsPlannedKey, partNum, tableDescriptor.id(), tableDescriptor.name(),
                            targetAssignmentSet
                    );
                    break;
                case PLANNED_KEY_REMOVED_EQUALS_PENDING:
                    LOG.info(
                            "Remove planned key because current pending key has the same value [key={}, partition={}, table={}/{}, val={}]",
                            partAssignmentsPlannedKey.toString(), partNum, tableDescriptor.id(), tableDescriptor.name(),
                            targetAssignmentSet
                    );
                    break;
                case PLANNED_KEY_REMOVED_EMPTY_PENDING:
                    LOG.info(
                            "Remove planned key because pending is empty and calculated assignments are equal to current assignments "
                                    + "[key={}, partition={}, table={}/{}, val={}]",
                            partAssignmentsPlannedKey.toString(), partNum, tableDescriptor.id(), tableDescriptor.name(),
                            targetAssignmentSet
                    );
                    break;
                case ASSIGNMENT_NOT_UPDATED:
                    LOG.debug(
                            "Assignments are not updated [key={}, partition={}, table={}/{}, val={}]",
                            partAssignmentsPlannedKey.toString(), partNum, tableDescriptor.id(), tableDescriptor.name(),
                            targetAssignmentSet
                    );
                    break;
                case OUTDATED_UPDATE_RECEIVED:
                    LOG.debug(
                            "Received outdated rebalance trigger event [revision={}, partition={}, table={}/{}]",
                            revision, partNum, tableDescriptor.id(), tableDescriptor.name());
                    break;
                default:
                    throw new IllegalStateException("Unknown return code for rebalance metastore multi-invoke");
            }
        });
    }
    /**
     * Triggers rebalance on all partitions of the provided table: that is, reads table assignments from
     * the MetaStorage, computes new ones based on the current properties of the table, its zone and the
     * provided data nodes, and, if the calculated assignments are different from the ones loaded from the
     * MetaStorages, writes them as pending assignments.
     *
     * @param tableDescriptor Table descriptor.
     * @param zoneDescriptor Zone descriptor.
     * @param dataNodes Data nodes to use.
     * @param storageRevision MetaStorage revision corresponding to this request.
     * @param storageTimestamp MetaStorage timestamp corresponding to this request.
     * @param metaStorageManager MetaStorage manager used to read/write assignments.
     * @param assignmentsTimestamp Timestamp the produced {@code Assignments} are stamped with.
     * @param aliveNodes Nodes currently considered alive; consulted in HA consistency mode.
     * @return Future that completes when the described rebalance triggering completes for all partitions
     *         of the table.
     */
    public static CompletableFuture<Void> triggerAllTablePartitionsRebalance(
            CatalogTableDescriptor tableDescriptor,
            CatalogZoneDescriptor zoneDescriptor,
            Set<String> dataNodes,
            long storageRevision,
            HybridTimestamp storageTimestamp,
            MetaStorageManager metaStorageManager,
            long assignmentsTimestamp,
            Set<String> aliveNodes
    ) {
        int[] partitionIds = partitionIds(zoneDescriptor.partitions());
        return tableStableAssignments(metaStorageManager, tableDescriptor.id(), partitionIds)
                .thenCompose(stableAssignments -> {
                    // In case of empty assignments due to initially empty data nodes, assignments will be recalculated
                    // after the transition to non-empty data nodes.
                    // In case of empty assignments due to interrupted table creation, assignments will be written
                    // during the node recovery and then replicas will be started.
                    // In case when data nodes become empty, assignments are not recalculated
                    // (see DistributionZoneRebalanceEngine.createDistributionZonesDataNodesListener).
                    if (stableAssignments.isEmpty()) {
                        return nullCompletedFuture();
                    }
                    return tablePartitionAssignment(
                            tableDescriptor,
                            zoneDescriptor,
                            dataNodes,
                            storageRevision,
                            storageTimestamp,
                            metaStorageManager,
                            assignmentsTimestamp,
                            stableAssignments,
                            aliveNodes
                    );
                });
    }
    /**
     * Invokes {@link #updatePendingAssignmentsKeys} for every partition of the table and logs,
     * with per-partition deduplication, any failures.
     *
     * @param tableDescriptor Table descriptor.
     * @param zoneDescriptor Zone descriptor providing the partition count, replica count and consistency mode.
     * @param dataNodes Data nodes to compute assignments from.
     * @param storageRevision MetaStorage revision corresponding to this request.
     * @param storageTimestamp MetaStorage timestamp corresponding to this request.
     * @param metaStorageManager MetaStorage manager used to read/write assignments.
     * @param assignmentsTimestamp Timestamp the produced assignments are stamped with.
     * @param tableAssignments Current stable assignments keyed by partition id; must be non-empty and
     *        contain an entry for every partition (checked by the caller).
     * @param aliveNodes Nodes currently considered alive.
     * @return Future that completes when the updates for all partitions complete.
     */
    private static CompletableFuture<Void> tablePartitionAssignment(
            CatalogTableDescriptor tableDescriptor,
            CatalogZoneDescriptor zoneDescriptor,
            Set<String> dataNodes,
            long storageRevision,
            HybridTimestamp storageTimestamp,
            MetaStorageManager metaStorageManager,
            long assignmentsTimestamp,
            Map<Integer, Assignments> tableAssignments,
            Set<String> aliveNodes
    ) {
        // tableAssignments should not be empty. It is checked for emptiness before calling this method.
        CompletableFuture<?>[] futures = new CompletableFuture[zoneDescriptor.partitions()];
        for (int partId = 0; partId < zoneDescriptor.partitions(); partId++) {
            TablePartitionId replicaGrpId = new TablePartitionId(tableDescriptor.id(), partId);
            futures[partId] = updatePendingAssignmentsKeys(
                    tableDescriptor,
                    replicaGrpId,
                    dataNodes,
                    zoneDescriptor.partitions(),
                    zoneDescriptor.replicas(),
                    zoneDescriptor.consensusGroupSize(),
                    storageRevision,
                    storageTimestamp,
                    metaStorageManager,
                    partId,
                    tableAssignments.get(partId).nodes(),
                    assignmentsTimestamp,
                    aliveNodes,
                    zoneDescriptor.consistencyMode()
            );
        }
        // This set is used to deduplicate exceptions (if there is an exception from upstream, for instance,
        // when reading from MetaStorage, it will be encountered by every partition future) to avoid noise
        // in the logs.
        Set<Throwable> unwrappedCauses = ConcurrentHashMap.newKeySet();
        for (int partId = 0; partId < futures.length; partId++) {
            int finalPartId = partId;
            futures[partId].exceptionally(e -> {
                Throwable cause = ExceptionUtils.unwrapCause(e);
                if (unwrappedCauses.add(cause)) {
                    // The exception is specific to this partition.
                    LOG.error(
                            "Exception on updating assignments for [tableId={}, name={}, partition={}]",
                            e,
                            tableDescriptor.id(), tableDescriptor.name(), finalPartId
                    );
                } else {
                    // The exception is from upstream and not specific for this partition, so don't log the partition index.
                    LOG.error(
                            "Exception on updating assignments for [tableId={}, name={}]",
                            e,
                            tableDescriptor.id(), tableDescriptor.name()
                    );
                }
                return null;
            });
        }
        return allOf(futures);
    }
/**
* Key that is needed for skipping stale events of pending key change.
*
* @param partId Unique identifier of a partition.
* @return Key for a partition.
* @see <a href="https://github.com/apache/ignite-3/blob/main/modules/table/tech-notes/rebalance.md">Rebalance documentation</a>
*/
public static ByteArray pendingChangeTriggerKey(TablePartitionId partId) {
return new ByteArray(PENDING_CHANGE_TRIGGER_PREFIX + partId);
}
/**
* Key that is needed for the rebalance algorithm.
*
* @param partId Unique identifier of a partition.
* @return Key for a partition.
* @see <a href="https://github.com/apache/ignite-3/blob/main/modules/table/tech-notes/rebalance.md">Rebalance documentation</a>
*/
public static ByteArray pendingPartAssignmentsQueueKey(TablePartitionId partId) {
return new ByteArray(PENDING_ASSIGNMENTS_QUEUE_PREFIX + partId);
}
/**
* Key that is needed for the rebalance algorithm.
*
* @param partId Unique identifier of a partition.
* @return Key for a partition.
* @see <a href="https://github.com/apache/ignite-3/blob/main/modules/table/tech-notes/rebalance.md">Rebalance documentation</a>
*/
public static ByteArray plannedPartAssignmentsKey(TablePartitionId partId) {
return new ByteArray(PLANNED_ASSIGNMENTS_PREFIX + partId);
}
/**
* Key that is needed for the rebalance algorithm.
*
* @param partId Unique identifier of a partition.
* @return Key for a partition.
* @see <a href="https://github.com/apache/ignite-3/blob/main/modules/table/tech-notes/rebalance.md">Rebalance documentation</a>
*/
public static ByteArray stablePartAssignmentsKey(TablePartitionId partId) {
return new ByteArray(STABLE_ASSIGNMENTS_PREFIX + partId);
}
/**
* Key for the graceful restart in HA mode.
*
* @param partId Unique identifier of a partition.
* @return Key for a partition.
* @see <a href="https://cwiki.apache.org/confluence/display/IGNITE/IEP-131%3A+Partition+Majority+Unavailability+Handling">HA mode</a>
*/
public static ByteArray assignmentsChainKey(TablePartitionId partId) {
return new ByteArray(ASSIGNMENTS_CHAIN_PREFIX + partId);
}
/**
* Key that is needed for the rebalance algorithm.
*
* @param partId Unique identifier of a partition.
* @return Key for a partition.
* @see <a href="https://github.com/apache/ignite-3/blob/main/modules/table/tech-notes/rebalance.md">Rebalance documentation</a>
*/
public static ByteArray switchReduceKey(TablePartitionId partId) {
return new ByteArray(ASSIGNMENTS_SWITCH_REDUCE_PREFIX + partId);
}
/**
* Key that is needed for the rebalance algorithm.
*
* @param partId Unique identifier of a partition.
* @return Key for a partition.
* @see <a href="https://github.com/apache/ignite-3/blob/main/modules/table/tech-notes/rebalance.md">Rebalance documentation</a>
*/
public static ByteArray switchAppendKey(TablePartitionId partId) {
return new ByteArray(ASSIGNMENTS_SWITCH_APPEND_PREFIX + partId);
}
/**
* Converts the given {@code key}, stripping it off the given {@code prefix}, into a {@link TablePartitionId}.
*
* @param key Metastorage key.
* @param prefix Key prefix.
* @return {@link TablePartitionId} that was encoded in the key.
*/
public static TablePartitionId extractTablePartitionId(byte[] key, byte[] prefix) {
var tablePartitionIdString = toStringWithoutPrefix(key, prefix.length);
return TablePartitionId.fromString(tablePartitionIdString);
}
    /**
     * Extracts an id from a metastorage key of partition by parsing the text after the prefix.
     *
     * <p>NOTE(review): the method name says "zone id" while the previous javadoc said "table id" —
     * which one is encoded depends on the key layout the caller uses; confirm against the callers.
     *
     * @param key Key.
     * @param prefix Key prefix.
     * @return Id parsed from the key suffix.
     */
    public static int extractZoneId(byte[] key, byte[] prefix) {
        return Integer.parseInt(toStringWithoutPrefix(key, prefix.length));
    }
    /**
     * Checks if an error is recoverable, so we can retry a rebalance intent.
     *
     * @param t The throwable.
     * @return {@code True} if this is a recoverable exception; currently always {@code true}.
     */
    public static boolean recoverable(Throwable t) {
        // As long as we don't have a general failure handler, we assume that all errors are recoverable.
        return true;
    }
/**
* Removes nodes from set of nodes.
*
* @param minuend Set to remove nodes from.
* @param subtrahend Set of nodes to be removed.
* @return Result of the subtraction.
*/
public static <T> Set<T> subtract(Set<T> minuend, Set<T> subtrahend) {
return minuend.stream().filter(v -> !subtrahend.contains(v)).collect(toSet());
}
/**
* Adds nodes to the set of nodes.
*
* @param op1 First operand.
* @param op2 Second operand.
* @return Result of the addition.
*/
public static <T> Set<T> union(Set<T> op1, Set<T> op2) {
var res = new HashSet<>(op1);
res.addAll(op2);
return res;
}
/**
* Returns an intersection of two set of nodes.
*
* @param op1 First operand.
* @param op2 Second operand.
* @return Result of the intersection.
*/
public static <T> Set<T> intersect(Set<T> op1, Set<T> op2) {
return op1.stream().filter(op2::contains).collect(toSet());
}
    /**
     * Returns partition assignments from meta storage.
     *
     * @param metaStorageManager Meta storage manager.
     * @param tableId Table ID.
     * @param partitionId Partition ID.
     * @return Future with partition assignments as a value; completes with {@code null} when the
     *         stable-assignments key has no value.
     */
    @TestOnly
    public static CompletableFuture<Set<Assignment>> stablePartitionAssignments(
            MetaStorageManager metaStorageManager,
            int tableId,
            int partitionId
    ) {
        return metaStorageManager
                .get(stablePartAssignmentsKey(new TablePartitionId(tableId, partitionId)))
                .thenApply(e -> (e.value() == null) ? null : Assignments.fromBytes(e.value()).nodes());
    }
    /**
     * Returns partition assignments from meta storage locally.
     *
     * @param metaStorageManager Meta storage manager.
     * @param tablePartitionId Table partition id.
     * @param revision Revision.
     * @return Partition assignments read locally from meta storage, or {@code null} if the entry is
     *         absent, empty or a tombstone.
     */
    public static @Nullable Assignments stableAssignmentsGetLocally(
            MetaStorageManager metaStorageManager,
            TablePartitionId tablePartitionId,
            long revision
    ) {
        Entry entry = metaStorageManager.getLocally(stablePartAssignmentsKey(tablePartitionId), revision);
        // Empty entries and tombstones carry a null value, so they are reported as "absent".
        return (entry == null || entry.empty() || entry.tombstone()) ? null : Assignments.fromBytes(entry.value());
    }
/**
* Returns partition assignments from meta storage locally.
*
* @param metaStorageManager Meta storage manager.
* @param tableId Table id.
* @param partitionNumber Partition number.
* @param revision Revision.
* @return Returns partition assignments from meta storage locally or {@code null} if assignments is absent.
*/
@Nullable
public static Set<Assignment> partitionAssignmentsGetLocally(
MetaStorageManager metaStorageManager,
int tableId,
int partitionNumber,
long revision
) {
Assignments assignments = stableAssignmentsGetLocally(metaStorageManager, new TablePartitionId(tableId, partitionNumber), revision);
return assignments == null ? null : assignments.nodes();
}
    /**
     * Returns stable table assignments for table partitions from meta storage.
     *
     * @param metaStorageManager Meta storage manager.
     * @param tableId Table id.
     * @param partitionIds IDs of partitions to get assignments for.
     * @return Future with table assignments as a value, keyed by partition id; the map is either empty
     *         or contains an entry for every requested partition (asserted below).
     */
    public static CompletableFuture<Map<Integer, Assignments>> tableStableAssignments(
            MetaStorageManager metaStorageManager,
            int tableId,
            int[] partitionIds
    ) {
        return metastoreAssignments(
                metaStorageManager,
                partitionIds,
                partitionId -> stablePartAssignmentsKey(new TablePartitionId(tableId, partitionId))
        ).whenComplete((assignmentsMap, throwable) -> {
            if (throwable == null) {
                // Sanity check: assignments must be all-or-nothing — either no partition has been
                // written yet, or every requested partition has an entry.
                int numberOfMsPartitions = assignmentsMap.size();
                assert numberOfMsPartitions == 0 || numberOfMsPartitions == partitionIds.length
                        : "Invalid number of partition entries received from meta storage [received="
                        + numberOfMsPartitions + ", numberOfPartitions=" + partitionIds.length + ", tableId=" + tableId + "].";
            }
        });
    }
    /**
     * Returns table assignments for all table partitions from meta storage locally. Assignments must be present.
     *
     * @param metaStorageManager Meta storage manager.
     * @param tableId Table id.
     * @param numberOfPartitions Number of partitions.
     * @param revision Revision.
     * @return Table assignments, one element per partition, in partition order.
     */
    public static List<Assignments> tableAssignmentsGetLocally(
            MetaStorageManager metaStorageManager,
            int tableId,
            int numberOfPartitions,
            long revision
    ) {
        return IntStream.range(0, numberOfPartitions)
                .mapToObj(p -> {
                    Assignments assignments = stableAssignmentsGetLocally(metaStorageManager, new TablePartitionId(tableId, p), revision);
                    // The contract requires assignments to be present for every partition.
                    assert assignments != null;
                    return assignments;
                })
                .collect(toList());
    }
/**
* Returns table pending assignments for all table partitions from meta storage locally.
*
* @param metaStorageManager Meta storage manager.
* @param tableId Table id.
* @param numberOfPartitions Number of partitions.
* @param revision Revision.
* @return Future with table assignments as a value.
*/
public static List<Assignments> tablePendingAssignmentsGetLocally(
MetaStorageManager metaStorageManager,
int tableId,
int numberOfPartitions,
long revision
) {
return IntStream.range(0, numberOfPartitions)
.mapToObj(p -> {
Entry e = metaStorageManager.getLocally(pendingPartAssignmentsQueueKey(new TablePartitionId(tableId, p)), revision);
return e != null && e.value() != null ? AssignmentsQueue.fromBytes(e.value()).poll() : null;
})
.collect(toList());
}
    /**
     * Returns assignments chains for all table partitions from meta storage locally.
     *
     * @param metaStorageManager Meta storage manager.
     * @param tableId Table id.
     * @param numberOfPartitions Number of partitions.
     * @param revision Revision.
     * @return Assignments chains, one element per partition in partition order; elements may be
     *         {@code null} when a chain is absent (see {@link #assignmentsChainGetLocally}).
     */
    public static List<AssignmentsChain> tableAssignmentsChainGetLocally(
            MetaStorageManager metaStorageManager,
            int tableId,
            int numberOfPartitions,
            long revision
    ) {
        return IntStream.range(0, numberOfPartitions)
                .mapToObj(p -> assignmentsChainGetLocally(metaStorageManager, new TablePartitionId(tableId, p), revision))
                .collect(toList());
    }
/**
* Returns assignments chain from meta storage locally.
*
* @param metaStorageManager Meta storage manager.
* @param tablePartitionId Table partition id.
* @param revision Revision.
* @return Returns assignments chain from meta storage locally or {@code null} if assignments is absent.
*/
public static @Nullable AssignmentsChain assignmentsChainGetLocally(
MetaStorageManager metaStorageManager,
TablePartitionId tablePartitionId,
long revision
) {
Entry e = metaStorageManager.getLocally(assignmentsChainKey(tablePartitionId), revision);
return e != null ? AssignmentsChain.fromBytes(e.value()) : null;
}
}
|
apache/jackrabbit-oak | 37,082 | oak-jcr/src/main/java/org/apache/jackrabbit/oak/jcr/delegate/SessionDelegate.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.jackrabbit.oak.jcr.delegate;
import static java.util.Objects.requireNonNull;
import static java.util.concurrent.TimeUnit.MILLISECONDS;
import static java.util.concurrent.TimeUnit.NANOSECONDS;
import static java.util.concurrent.TimeUnit.SECONDS;
import static org.apache.jackrabbit.api.stats.RepositoryStatistics.Type.SESSION_READ_COUNTER;
import static org.apache.jackrabbit.api.stats.RepositoryStatistics.Type.SESSION_READ_DURATION;
import static org.apache.jackrabbit.api.stats.RepositoryStatistics.Type.SESSION_WRITE_COUNTER;
import static org.apache.jackrabbit.api.stats.RepositoryStatistics.Type.SESSION_WRITE_DURATION;
import static org.apache.jackrabbit.oak.commons.PathUtils.denotesRoot;
import java.io.IOException;
import java.util.Collections;
import java.util.HashMap;
import java.util.Iterator;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.Condition;
import java.util.concurrent.locks.Lock;
import java.util.concurrent.locks.ReentrantLock;
import javax.jcr.ItemExistsException;
import javax.jcr.PathNotFoundException;
import javax.jcr.RepositoryException;
import javax.jcr.nodetype.ConstraintViolationException;
import org.apache.jackrabbit.oak.api.AuthInfo;
import org.apache.jackrabbit.oak.api.CommitFailedException;
import org.apache.jackrabbit.oak.api.ContentSession;
import org.apache.jackrabbit.oak.api.QueryEngine;
import org.apache.jackrabbit.oak.api.Root;
import org.apache.jackrabbit.oak.api.Tree;
import org.apache.jackrabbit.oak.commons.PathUtils;
import org.apache.jackrabbit.oak.commons.properties.SystemPropertySupplier;
import org.apache.jackrabbit.oak.jcr.observation.EventFactory;
import org.apache.jackrabbit.oak.jcr.session.RefreshStrategy;
import org.apache.jackrabbit.oak.jcr.session.RefreshStrategy.Composite;
import org.apache.jackrabbit.oak.jcr.session.SessionNamespaces;
import org.apache.jackrabbit.oak.jcr.session.SessionSaveDelayer;
import org.apache.jackrabbit.oak.jcr.session.SessionStats;
import org.apache.jackrabbit.oak.jcr.session.SessionStats.Counters;
import org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation;
import org.apache.jackrabbit.oak.plugins.identifier.IdentifierManager;
import org.apache.jackrabbit.oak.spi.security.SecurityProvider;
import org.apache.jackrabbit.oak.spi.security.authorization.AuthorizationConfiguration;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionAware;
import org.apache.jackrabbit.oak.spi.security.authorization.permission.PermissionProvider;
import org.apache.jackrabbit.oak.spi.state.ReadyOnlyBuilderException;
import org.apache.jackrabbit.oak.stats.Clock;
import org.apache.jackrabbit.oak.stats.StatisticManager;
import org.apache.jackrabbit.oak.stats.MeterStats;
import org.apache.jackrabbit.oak.stats.TimerStats;
import org.jetbrains.annotations.NotNull;
import org.jetbrains.annotations.Nullable;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
 * Internal delegate backing the JCR session implementation: wraps a {@link ContentSession} and
 * funnels all repository operations through a single lock (logging attempts at concurrent use),
 * applies the configured {@link RefreshStrategy} before each operation, and records read/write
 * counters and durations for session statistics.
 */
public class SessionDelegate {
    static final Logger log = LoggerFactory.getLogger(SessionDelegate.class);
    // dedicated loggers so audit trails and per-operation tracing can be enabled independently
    static final Logger auditLogger = LoggerFactory.getLogger("org.apache.jackrabbit.oak.audit");
    static final Logger readOperationLogger = LoggerFactory.getLogger("org.apache.jackrabbit.oak.jcr.operations.reads");
    static final Logger writeOperationLogger = LoggerFactory.getLogger("org.apache.jackrabbit.oak.jcr.operations.writes");
    // the bitmask used for trace level logging
    // we use a bitmask instead of a counter to avoid the slow modulo operation:
    // https://stackoverflow.com/questions/27977834/why-is-modulus-operator-slow
    // so we use "if ((counter & LOG_TRACE_BIT_MASK) == 0) log(...)"
    // instead of the slower "if ((counter % LOG_TRACE) == 0) log(...)"
    // that means the values need to be some power of two, minus one
    // log every 128th call by default
    private static final long LOG_TRACE_BIT_MASK = SystemPropertySupplier.create(
            "org.apache.jackrabbit.oak.jcr.operations.bitMask",
            128L - 1).loggingTo(log).get();
    // log a stack trace every ~1 million calls by default
    private static final long LOG_TRACE_STACK_BIT_MASK = SystemPropertySupplier.create(
            "org.apache.jackrabbit.oak.jcr.operations.stack.bitMask",
            1024L * 1024 - 1).loggingTo(log).get();
    // the counter used for logging
    private static final AtomicLong LOG_COUNTER = new AtomicLong();
    // the underlying Oak content session this delegate wraps
    private final ContentSession contentSession;
    private final SecurityProvider securityProvider;
    // flag-style strategy: a refresh can be requested explicitly via refreshAtNextAccess()
    private final RefreshAtNextAccess refreshAtNextAccess = new RefreshAtNextAccess();
    private final SaveCountRefresh saveCountRefresh;
    // composite of the configured strategy plus the internal ones (built in the constructor)
    private final RefreshStrategy refreshStrategy;
    private final Root root;
    private final IdentifierManager idManager;
    private final SessionStats sessionStats;
    private final Clock clock;
    // access time stamps and counters for statistics about this session
    private final Counters sessionCounters;
    // repository-wide counters for statistics about all sessions
    private final MeterStats readCounter;
    private final TimerStats readDuration;
    private final MeterStats writeCounter;
    private final TimerStats writeDuration;
    // NOTE(review): presumably cleared on logout — the mutation is outside this view, confirm
    private boolean isAlive = true;
    // depth of nested perform() calls currently in flight on this session
    private int sessionOpCount;
    private long updateCount = 0;
    private String userData = null;
    private PermissionProvider permissionProvider;
    private boolean refreshPermissionProvider = false;
    /**
     * The lock used to guarantee synchronized execution of repository
     * operations. An explicit lock is used instead of normal Java
     * synchronization in order to be able to log attempts to concurrently
     * use a session.
     */
    private final WarningLock lock = new WarningLock(new ReentrantLock());
    private final SessionNamespaces namespaces;
    private final SessionSaveDelayer sessionSaveDelayer;
    /**
     * Create a new session delegate for a {@code ContentSession}. The refresh behaviour of the
     * session is governed by the value of the {@code refreshInterval} argument: if the session
     * has been idle longer than that value, an implicit refresh will take place.
     * In addition a refresh can always be scheduled from the next access by an explicit call
     * to {@link #refreshAtNextAccess()}. This is typically done from within the observation event
     * dispatcher in order.
     *
     * @param contentSession the content session
     * @param securityProvider the security provider
     * @param refreshStrategy the refresh strategy used for auto refreshing this session
     * @param threadSaveCount thread-local save counter consumed by the save-count refresh strategy
     * @param statisticManager the statistics manager for tracking session operations
     * @param clock the clock used to time-stamp session operations
     * @param sessionSaveDelayer the session save delay mechanism
     */
    public SessionDelegate(
            @NotNull ContentSession contentSession,
            @NotNull SecurityProvider securityProvider,
            @NotNull RefreshStrategy refreshStrategy,
            @NotNull ThreadLocal<Long> threadSaveCount,
            @NotNull StatisticManager statisticManager,
            @NotNull Clock clock,
            @NotNull SessionSaveDelayer sessionSaveDelayer) {
        this.contentSession = requireNonNull(contentSession);
        this.securityProvider = requireNonNull(securityProvider);
        this.root = contentSession.getLatestRoot();
        this.namespaces = new SessionNamespaces(this.root);
        this.saveCountRefresh = new SaveCountRefresh(requireNonNull(threadSaveCount));
        // combine the caller-supplied strategy with the internal refresh triggers
        this.refreshStrategy = Composite.create(requireNonNull(refreshStrategy),
                refreshAtNextAccess, saveCountRefresh, new RefreshNamespaces(
                        namespaces));
        this.idManager = new IdentifierManager(root);
        this.clock = requireNonNull(clock);
        requireNonNull(statisticManager);
        this.sessionStats = new SessionStats(contentSession.toString(),
                contentSession.getAuthInfo(), clock, refreshStrategy, this, statisticManager);
        this.sessionCounters = sessionStats.getCounters();
        readCounter = statisticManager.getMeter(SESSION_READ_COUNTER);
        readDuration = statisticManager.getTimer(SESSION_READ_DURATION);
        writeCounter = statisticManager.getMeter(SESSION_WRITE_COUNTER);
        writeDuration = statisticManager.getTimer(SESSION_WRITE_DURATION);
        this.sessionSaveDelayer = sessionSaveDelayer;
    }
    /**
     * Returns the statistics collector for this session.
     *
     * @return the session statistics, never {@code null}
     */
    @NotNull
    public SessionStats getSessionStats() {
        return sessionStats;
    }
    /**
     * Schedules an implicit refresh of this session at its next access. The flag is consumed by
     * the composite refresh strategy installed in the constructor. Guarded by the session lock.
     */
    public void refreshAtNextAccess() {
        lock.lock();
        try {
            refreshAtNextAccess.refreshAtNextAccess(true);
        } finally {
            lock.unlock();
        }
    }
    /**
     * Wrap the passed {@code iterator} in an iterator that synchronizes
     * all access to the underlying session.
     * @param iterator the iterator to synchronize
     * @param <T> element type of the iterator
     * @return synchronized iterator
     */
    public <T> Iterator<T> sync(Iterator<T> iterator) {
        return new SynchronizedIterator<>(iterator, lock);
    }
/**
 * Performs the passed {@code SessionOperation} in a safe execution context. This
 * context ensures that the session is refreshed if necessary and that refreshing
 * occurs before the session operation is performed and the refreshing is done only
 * once.
 *
 * @param sessionOperation the {@code SessionOperation} to perform
 * @param <T> return type of {@code sessionOperation}
 * @return the result of {@code sessionOperation.perform()}
 * @throws RepositoryException
 * @see #getRoot()
 */
@NotNull
public <T> T perform(@NotNull SessionOperation<T> sessionOperation) throws RepositoryException {
    long t0 = clock.getTime();
    // Acquire the exclusive lock for accessing session internals.
    // No other session should be holding the lock, so we log a
    // message to let the user know of such cases.
    lock.lock(sessionOperation);
    try {
        prePerform(sessionOperation, t0);
        try {
            // Counts re-entrant invocations; only the outermost one
            // (sessionOpCount == 0 in prePerform) may refresh.
            sessionOpCount++;
            T result = sessionOperation.perform();
            logOperationDetails(contentSession, sessionOperation);
            return result;
        } finally {
            postPerform(sessionOperation, t0);
        }
    } catch (ReadyOnlyBuilderException e) {
        // Writes against a read-only builder surface as constraint violations.
        throw new ConstraintViolationException(e);
    } finally {
        lock.unlock();
    }
}
/**
 * Same as {@link #perform(org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation)}
 * but with the option to return {@code null}; thus calling
 * {@link org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation#performNullable()}
 *
 * @param sessionOperation the {@code SessionOperation} to perform
 * @param <T> return type of {@code sessionOperation}
 * @return the result of {@code sessionOperation.performNullable()}, which
 * might also be {@code null}.
 * @throws RepositoryException
 * @see #perform(org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation)
 */
@Nullable
public <T> T performNullable(@NotNull SessionOperation<T> sessionOperation) throws RepositoryException {
    long t0 = clock.getTime();
    // Acquire the exclusive lock for accessing session internals.
    // No other session should be holding the lock, so we log a
    // message to let the user know of such cases.
    lock.lock(sessionOperation);
    try {
        prePerform(sessionOperation, t0);
        try {
            // Re-entrancy counter; see perform().
            sessionOpCount++;
            T result = sessionOperation.performNullable();
            logOperationDetails(contentSession, sessionOperation);
            return result;
        } finally {
            postPerform(sessionOperation, t0);
        }
    } catch (ReadyOnlyBuilderException e) {
        // Writes against a read-only builder surface as constraint violations.
        throw new ConstraintViolationException(e);
    } finally {
        lock.unlock();
    }
}
/**
 * Same as {@link #perform(org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation)}
 * for calls that don't expect any return value; thus calling
 * {@link org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation#performVoid()}.
 *
 * @param sessionOperation the {@code SessionOperation} to perform.
 * @throws RepositoryException
 * @see #perform(org.apache.jackrabbit.oak.jcr.session.operation.SessionOperation)
 */
// Consistency fix: annotate the parameter @NotNull like perform()/performNullable();
// all three implementations dereference the operation unconditionally.
public void performVoid(@NotNull SessionOperation<Void> sessionOperation) throws RepositoryException {
    long t0 = clock.getTime();
    // Acquire the exclusive lock for accessing session internals.
    // No other session should be holding the lock, so we log a
    // message to let the user know of such cases.
    lock.lock(sessionOperation);
    try {
        prePerform(sessionOperation, t0);
        try {
            // Re-entrancy counter; see perform().
            sessionOpCount++;
            sessionOperation.performVoid();
            logOperationDetails(contentSession, sessionOperation);
        } finally {
            postPerform(sessionOperation, t0);
        }
    } catch (ReadyOnlyBuilderException e) {
        // Writes against a read-only builder surface as constraint violations.
        throw new ConstraintViolationException(e);
    } finally {
        lock.unlock();
    }
}
/**
 * Same as {@link #perform(SessionOperation)} unless this method expects
 * {@link SessionOperation#perform} <em>not</em> to throw a {@code RepositoryException}.
 * Such exceptions will be wrapped into a {@code RuntimeException} and rethrown as they
 * are considered an internal error.
 *
 * @param sessionOperation the {@code SessionOperation} to perform
 * @param <T> return type of {@code sessionOperation}
 * @return the result of {@code sessionOperation.perform()}
 * @see #getRoot()
 */
@NotNull
public <T> T safePerform(SessionOperation<T> sessionOperation) {
    try {
        return perform(sessionOperation);
    } catch (RepositoryException e) {
        // Reaching here indicates a programming error in the operation.
        throw new RuntimeException("Unexpected exception thrown by operation " + sessionOperation, e);
    }
}
/**
 * Same as {@link #performNullable(SessionOperation)} unless this method expects
 * {@link SessionOperation#performNullable} <em>not</em> to throw a {@code RepositoryException}.
 * Such exceptions will be wrapped into a {@code RuntimeException} and rethrown as they
 * are considered an internal error.
 *
 * @param sessionOperation the {@code SessionOperation} to perform
 * @param <T> return type of {@code sessionOperation}
 * @return the result of {@code sessionOperation.performNullable()}
 */
@Nullable
public <T> T safePerformNullable(SessionOperation<T> sessionOperation) {
    try {
        return performNullable(sessionOperation);
    } catch (RepositoryException e) {
        // Reaching here indicates a programming error in the operation.
        throw new RuntimeException("Unexpected exception thrown by operation " + sessionOperation, e);
    }
}
/** @return the underlying {@code ContentSession} this delegate wraps. */
@NotNull
public ContentSession getContentSession() {
    return contentSession;
}
/**
 * Determine whether this session is alive and has not been logged
 * out or become stale by other means.
 * @return {@code true} if this session is alive, {@code false} otherwise.
 * @see #logout()
 */
public boolean isAlive() {
    return isAlive;
}
/**
 * Check that this session is alive.
 * @throws RepositoryException if this session is not alive
 * @see #isAlive()
 */
public void checkAlive() throws RepositoryException {
    if (!isAlive()) {
        throw new RepositoryException("This session has been closed.");
    }
}
/**
 * @return session update counter, incremented for each write operation
 * performed and for each refresh triggered in {@code prePerform()}
 */
public long getUpdateCount() {
    return updateCount;
}
/**
 * Set the user data that will be attached as commit info to subsequent
 * commits of this session (see {@code commit(Root, String)}).
 */
public void setUserData(String userData) {
    this.userData = userData;
}
/**
 * Commit the given root, optionally scoping the commit to {@code path},
 * attaching this session's user data (if any) as commit info.
 */
private void commit(Root root, String path) throws CommitFailedException {
    Map<String, Object> info = new HashMap<>();
    if (path != null && !denotesRoot(path)) {
        // Restrict the commit to the given subtree.
        info.put(Root.COMMIT_PATH, path);
    }
    if (userData != null) {
        info.put(EventFactory.USER_DATA, userData);
    }
    // Allow the injected delayer to delay the save; semantics depend on the
    // SessionSaveDelayer implementation.
    sessionSaveDelayer.delayIfNeeded(userData);
    root.commit(Collections.unmodifiableMap(info));
    if (permissionProvider != null && refreshPermissionProvider) {
        permissionProvider.refresh();
    }
}
/**
 * Commits the changes currently in the transient space.
 * TODO: Consolidate with save().
 *
 * @throws CommitFailedException if the commit failed
 */
public void commit() throws CommitFailedException {
    // Whole transient space, no path scoping.
    commit(root, null);
}
/**
 * Commits the changes applied to the given root. The user data (if any)
 * currently attached to this session is passed as the commit message.
 * Used both for normal save() calls and for the various
 * direct-to-workspace operations.
 *
 * @throws CommitFailedException if the commit failed
 */
public void commit(Root root) throws CommitFailedException {
    commit(root, null);
}
/**
 * Ensure a node exists at {@code path} and is not protected.
 *
 * @param path Oak path of the node to check
 * @throws PathNotFoundException if no node exists at {@code path}
 * @throws ConstraintViolationException if the node is protected
 * @throws RepositoryException on other failures
 */
public void checkProtectedNode(String path) throws RepositoryException {
    NodeDelegate node = getNode(path);
    if (node == null) {
        throw new PathNotFoundException("Node " + path + " does not exist.");
    }
    if (node.isProtected()) {
        throw new ConstraintViolationException("Node " + path + " is protected.");
    }
}
/** @return the authentication info of the underlying content session. */
@NotNull
public AuthInfo getAuthInfo() {
    return contentSession.getAuthInfo();
}
/**
 * Log out this session: mark it dead, close the session statistics and
 * the underlying content session. Repeated calls are no-ops.
 */
public void logout() {
    if (!isAlive) {
        // ignore
        return;
    }
    isAlive = false;
    // TODO
    sessionStats.close();
    try {
        contentSession.close();
    } catch (IOException e) {
        // Best effort: log and continue, the session is dead either way.
        log.warn("Error while closing connection", e);
    }
}
/** @return the identifier manager used to resolve nodes by identifier. */
@NotNull
public IdentifierManager getIdManager() {
    return idManager;
}
/**
 * @return the {@code NodeDelegate} for the root node, or {@code null} if
 * it does not exist or is not accessible (see {@link #getNode(String)}).
 */
@Nullable
public NodeDelegate getRootNode() {
    return getNode("/");
}
/**
 * {@code NodeDelegate} at the given path
 * @param path Oak path
 * @return The {@code NodeDelegate} at {@code path} or {@code null} if
 * none exists or not accessible.
 */
@Nullable
public NodeDelegate getNode(String path) {
    // A tree that does not exist (or is not accessible) yields null.
    Tree tree = root.getTree(path);
    return tree.exists() ? new NodeDelegate(this, tree) : null;
}
/**
 * Resolve the item (node or property) located at the given path.
 *
 * @param path Oak path
 * @return node or property delegate, or {@code null} if none exists
 */
@Nullable
public ItemDelegate getItem(String path) {
    String itemName = PathUtils.getName(path);
    // Only the root path has an empty name.
    if (itemName.isEmpty()) {
        return getRootNode();
    }
    Tree parentTree = root.getTree(PathUtils.getParentPath(path));
    Tree childTree = parentTree.getChild(itemName);
    if (childTree.exists()) {
        return new NodeDelegate(this, childTree);
    }
    if (parentTree.hasProperty(itemName)) {
        return new PropertyDelegate(this, parentTree, itemName);
    }
    return null;
}
/**
 * Resolve a node by its identifier.
 *
 * @param id node identifier
 * @return the matching {@code NodeDelegate}, or {@code null} if the
 * identifier does not resolve to an existing, accessible tree
 */
@Nullable
public NodeDelegate getNodeByIdentifier(String id) {
    Tree resolved = idManager.getTree(id);
    if (resolved != null && resolved.exists()) {
        // Hook for subclasses interested in identifier lookups.
        treeLookedUpByIdentifier(resolved);
        return new NodeDelegate(this, resolved);
    }
    return null;
}
/**
 * Extension hook invoked whenever a tree was successfully resolved by
 * identifier in {@link #getNodeByIdentifier(String)}. Default: no-op.
 */
protected void treeLookedUpByIdentifier(@NotNull Tree tree) {
}
/**
 * {@code PropertyDelegate} at the given path
 * @param path Oak path
 * @return The {@code PropertyDelegate} at {@code path} or {@code null} if
 * none exists or not accessible.
 */
@Nullable
public PropertyDelegate getProperty(String path) {
    String propertyName = PathUtils.getName(path);
    Tree parentTree = root.getTree(PathUtils.getParentPath(path));
    if (!parentTree.hasProperty(propertyName)) {
        return null;
    }
    return new PropertyDelegate(this, parentTree, propertyName);
}
/** @return {@code true} if this session's root has uncommitted transient changes. */
public boolean hasPendingChanges() {
    return root.hasPendingChanges();
}
/**
 * Save the subtree rooted at the given {@code path}, or the entire
 * transient space if given the root path or {@code null}.
 * <p>
 * This implementation only performs the save if the subtree rooted
 * at {@code path} contains all transient changes and will throw an
 * {@link javax.jcr.UnsupportedRepositoryOperationException} otherwise.
 *
 * @param path
 * @throws RepositoryException
 */
public void save(String path) throws RepositoryException {
    // Record save statistics before attempting the commit so failed saves
    // are counted as well.
    sessionCounters.saveTime = clock.getTime();
    sessionCounters.saveCount++;
    try {
        commit(root, path);
    } catch (CommitFailedException e) {
        RepositoryException repositoryException = newRepositoryException(e);
        sessionStats.failedSave(repositoryException);
        throw repositoryException;
    }
}
/**
 * Refresh this session's root. With {@code keepChanges} pending transient
 * changes are rebased onto the latest repository state; otherwise they are
 * discarded.
 */
public void refresh(boolean keepChanges) {
    sessionCounters.refreshTime = clock.getTime();
    sessionCounters.refreshCount++;
    if (keepChanges && hasPendingChanges()) {
        root.rebase();
    } else {
        root.refresh();
    }
    if (permissionProvider != null && refreshPermissionProvider) {
        permissionProvider.refresh();
    }
}
//----------------------------------------------------------< Workspace >---

/** @return the name of the workspace this session operates on. */
@NotNull
public String getWorkspaceName() {
    return contentSession.getWorkspaceName();
}
/**
 * Move a node
 *
 * @param srcPath oak path to the source node to move
 * @param destPath oak path to the destination
 * @param transientOp whether or not to perform the move in transient space
 * @throws RepositoryException
 */
public void move(String srcPath, String destPath, boolean transientOp)
        throws RepositoryException {
    // A transient move operates on this session's root; a workspace move
    // operates on a fresh root and commits immediately.
    Root moveRoot = transientOp ? root : contentSession.getLatestRoot();

    // check destination
    Tree dest = moveRoot.getTree(destPath);
    if (dest.exists()) {
        throw new ItemExistsException(destPath);
    }

    // check parent of destination
    String destParentPath = PathUtils.getParentPath(destPath);
    Tree destParent = moveRoot.getTree(destParentPath);
    if (!destParent.exists()) {
        throw new PathNotFoundException(PathUtils.getParentPath(destPath));
    }

    // check source exists
    Tree src = moveRoot.getTree(srcPath);
    if (!src.exists()) {
        throw new PathNotFoundException(srcPath);
    }

    try {
        if (!moveRoot.move(srcPath, destPath)) {
            throw new RepositoryException("Cannot move node at " + srcPath + " to " + destPath);
        }
        if (!transientOp) {
            // Workspace move: count it as a save, commit, and re-sync this
            // session's root with the new repository state.
            sessionCounters.saveTime = clock.getTime();
            sessionCounters.saveCount++;
            commit(moveRoot);
            refresh(true);
        }
    } catch (CommitFailedException e) {
        throw newRepositoryException(e);
    }
}
/** @return the query engine of this session's current root. */
@NotNull
public QueryEngine getQueryEngine() {
    return root.getQueryEngine();
}
/**
 * Lazily obtain the permission provider for this session. If the root is
 * {@code PermissionAware} its provider is reused; otherwise a new provider
 * is created from the security provider and flagged so it gets refreshed
 * after commits and refreshes.
 */
@NotNull
public PermissionProvider getPermissionProvider() {
    if (permissionProvider == null) {
        if (root instanceof PermissionAware) {
            permissionProvider = ((PermissionAware) root).getPermissionProvider();
        } else {
            permissionProvider = requireNonNull(securityProvider)
                    .getConfiguration(AuthorizationConfiguration.class)
                    .getPermissionProvider(root, getWorkspaceName(), getAuthInfo().getPrincipals());
            // Self-created providers must be refreshed explicitly; see
            // commit(Root, String) and refresh(boolean).
            refreshPermissionProvider = true;
        }
    }
    return permissionProvider;
}
/**
 * The current {@code Root} instance this session delegate instance operates on.
 * To ensure the returned root reflects the correct repository revision, access
 * should only be done from within a {@link SessionOperation} closure through
 * {@link #perform(SessionOperation)}.
 *
 * @return current root
 */
@NotNull
public Root getRoot() {
    return root;
}
/** Delegates to the content session's string representation. */
@Override
public String toString() {
    return contentSession.toString();
}
//-----------------------------------------------------------< internal >---

/**
 * Common pre-processing for all perform variants: for the outermost
 * (non re-entrant) operation, refresh the session if any refresh strategy
 * demands it and check the operation's preconditions.
 */
private void prePerform(@NotNull SessionOperation<?> op, long t0) throws RepositoryException {
    if (sessionOpCount == 0) {
        // Refresh and precondition checks only for non re-entrant
        // session operations. Don't refresh if this operation is a
        // refresh operation itself or a save operation, which does an
        // implicit refresh, or logout for obvious reasons.
        if (!op.isRefresh() && !op.isSave() && !op.isLogout() &&
                refreshStrategy.needsRefresh(SECONDS.convert(t0 - sessionCounters.accessTime, MILLISECONDS))) {
            refresh(true);
            refreshStrategy.refreshed();
            updateCount++;
        }
        op.checkPreconditions();
    }
}
/**
 * Common post-processing for all perform variants: update access counters,
 * record read/write statistics and propagate save/refresh effects to the
 * refresh strategies.
 */
private void postPerform(@NotNull SessionOperation<?> op, long t0) {
    sessionCounters.accessTime = t0;
    // Elapsed time converted from clock milliseconds to nanoseconds.
    long dt = NANOSECONDS.convert(clock.getTime() - t0, MILLISECONDS);
    sessionOpCount--;
    if (op.isUpdate()) {
        sessionCounters.writeTime = t0;
        sessionCounters.writeCount++;
        writeCounter.mark();
        writeDuration.update(dt, TimeUnit.NANOSECONDS);
        updateCount++;
    } else {
        sessionCounters.readTime = t0;
        sessionCounters.readCount++;
        readCounter.mark();
        readDuration.update(dt, TimeUnit.NANOSECONDS);
    }
    if (op.isSave()) {
        refreshAtNextAccess.refreshAtNextAccess(false);
        // Force refreshing on access through other sessions on the same thread
        saveCountRefresh.forceRefresh();
    } else if (op.isRefresh()) {
        refreshAtNextAccess.refreshAtNextAccess(false);
        saveCountRefresh.refreshed();
    }
}
/**
 * Trace/audit logging for session operations. Trace output is sampled via
 * bit masks on a global operation counter to bound the logging overhead.
 */
private static <T> void logOperationDetails(ContentSession session, SessionOperation<T> ops) {
    if (readOperationLogger.isTraceEnabled()
            || writeOperationLogger.isTraceEnabled()
            || auditLogger.isDebugEnabled()) {
        Logger log = ops.isUpdate() ? writeOperationLogger : readOperationLogger;
        long logId = LOG_COUNTER.incrementAndGet();
        // Only every LOG_TRACE_BIT_MASK-aligned operation is traced; of
        // those, the LOG_TRACE_STACK_BIT_MASK-aligned ones include a stack.
        if ((logId & LOG_TRACE_BIT_MASK) == 0) {
            if ((logId & LOG_TRACE_STACK_BIT_MASK) == 0) {
                Exception e = new Exception("count: " + LOG_COUNTER);
                log.trace("[{}] {}", session, ops, e);
            } else {
                log.trace("[{}] {}", session, ops);
            }
        }
        //For a logout operation the auth info is not accessible
        if (!ops.isLogout() && !ops.isRefresh() && !ops.isSave() && ops.isUpdate()) {
            auditLogger.debug("[{}] [{}] {}", session.getAuthInfo().getUserID(), session, ops);
        }
    }
}
/**
 * Wraps the given {@link CommitFailedException} instance using the
 * appropriate {@link RepositoryException} subclass based on the
 * {@link CommitFailedException#getType() type} of the given exception.
 *
 * @param exception typed commit failure exception
 * @return matching repository exception (never {@code null})
 */
private static RepositoryException newRepositoryException(CommitFailedException exception) {
    return exception.asRepositoryException();
}
//------------------------------------------------------------< SynchronizedIterator >---

/**
 * This iterator delegates to a backing iterator and synchronises
 * all calls wrt. the lock passed to its constructor.
 * @param <T> element type of the iterator
 */
private static final class SynchronizedIterator<T> implements Iterator<T> {

    private final Iterator<T> iterator;
    private final WarningLock lock;

    SynchronizedIterator(Iterator<T> iterator, WarningLock lock) {
        this.iterator = iterator;
        this.lock = lock;
    }

    @Override
    public boolean hasNext() {
        // Read access: lock as non-update.
        lock.lock(false, "hasNext()");
        try {
            return iterator.hasNext();
        } finally {
            lock.unlock();
        }
    }

    @Override
    public T next() {
        // Read access: lock as non-update.
        lock.lock(false, "next()");
        try {
            return iterator.next();
        } finally {
            lock.unlock();
        }
    }

    @Override
    public void remove() {
        // Mutation: lock as update so contention is logged as a warning.
        lock.lock(true, "remove()");
        try {
            iterator.remove();
        } finally {
            lock.unlock();
        }
    }
}
/**
 * A {@link Lock} implementation that has additional methods
 * for acquiring the lock, which log a warning if the lock is
 * already held by another thread and was also acquired through
 * such a method.
 */
private static final class WarningLock implements Lock {

    private final Lock lock;

    // All access to members only *after* the lock has been acquired

    // Whether the current holder acquired the lock for an update operation.
    private boolean isUpdate;
    // Optional stack trace of the current holder (captured at debug level).
    private Exception holderTrace;
    // Name of the thread currently holding the lock, or null if acquired
    // through a plain Lock method.
    private String holderThread;

    private WarningLock(Lock lock) {
        this.lock = lock;
    }

    /**
     * Acquire the lock for the given operation, logging contention with
     * another session-operation holder: a warning if the holder was
     * writing, a debug message if it was reading.
     */
    public void lock(boolean isUpdate, Object operation) {
        if (!lock.tryLock()) {
            // Acquire the lock before logging the warnings. As otherwise race conditions
            // on the involved fields might lead to wrong warnings.
            lock.lock();
            if (holderThread != null) {
                if (this.isUpdate) {
                    warn(log, "Attempted to perform " + operation.toString() + " while thread " + holderThread +
                            " was concurrently writing to this session. Blocked until the " +
                            "other thread finished using this session. Please review your code " +
                            "to avoid concurrent use of a session.", holderTrace);
                } else if (log.isDebugEnabled()) {
                    log.debug("Attempted to perform " + operation.toString() + " while thread " + holderThread +
                            " was concurrently reading from this session. Blocked until the " +
                            "other thread finished using this session. Please review your code " +
                            "to avoid concurrent use of a session.", holderTrace);
                }
            }
        }
        // Record holder info for the warning issued to the next contender.
        this.isUpdate = isUpdate;
        if (log.isDebugEnabled()) {
            holderTrace = new Exception("Stack trace of concurrent access to session");
        }
        holderThread = Thread.currentThread().getName();
    }

    // Log with the holder's stack trace when one was captured.
    private static void warn(Logger logger, String message, Exception stackTrace) {
        if (stackTrace != null) {
            logger.warn(message, stackTrace);
        } else {
            logger.warn(message);
        }
    }

    public void lock(SessionOperation<?> sessionOperation) {
        lock(sessionOperation.isUpdate(), sessionOperation);
    }

    // Plain Lock methods clear the holder info: contention against them is
    // not reported.

    @Override
    public void lock() {
        lock.lock();
        holderTrace = null;
        holderThread = null;
    }

    @Override
    public void lockInterruptibly() throws InterruptedException {
        lock.lockInterruptibly();
        holderTrace = null;
        holderThread = null;
    }

    @Override
    public boolean tryLock() {
        if (lock.tryLock()) {
            holderTrace = null;
            holderThread = null;
            return true;
        } else {
            return false;
        }
    }

    @Override
    public boolean tryLock(long time, @NotNull TimeUnit unit) throws InterruptedException {
        if (lock.tryLock(time, unit)) {
            holderTrace = null;
            holderThread = null;
            return true;
        } else {
            return false;
        }
    }

    @Override
    public void unlock() {
        lock.unlock();
    }

    @NotNull
    @Override
    public Condition newCondition() {
        return lock.newCondition();
    }
}
/**
 * Refresh strategy that can be armed to force a single refresh on the next
 * session access; the flag is cleared once the refresh happened.
 */
private static class RefreshAtNextAccess implements RefreshStrategy {

    // Armed flag: true forces a refresh on the next access.
    private boolean pending;

    public void refreshAtNextAccess(boolean refreshAtNextAccess) {
        this.pending = refreshAtNextAccess;
    }

    @Override
    public boolean needsRefresh(long secondsSinceLastAccess) {
        return pending;
    }

    @Override
    public void refreshed() {
        pending = false;
    }

    @Override
    public String toString() {
        return "Refresh on observation event";
    }
}
/**
 * Refresh strategy that detects saves performed on the current thread by a
 * different session and forces this session to refresh in that case.
 */
private static class SaveCountRefresh implements RefreshStrategy {

    /**
     * The repository-wide {@link ThreadLocal} that keeps track of the number
     * of saves performed in each thread.
     */
    private final ThreadLocal<Long> threadSaveCount;

    /**
     * Snapshot of {@link #threadSaveCount} as last seen by this session.
     * A mismatch means some other session committed on this thread, or this
     * session is being used from another thread; either way a refresh is
     * due to avoid unexpected behaviour.
     */
    private long seenSaveCount;

    public SaveCountRefresh(ThreadLocal<Long> threadSaveCount) {
        this.threadSaveCount = threadSaveCount;
        this.seenSaveCount = getThreadSaveCount();
    }

    public void forceRefresh() {
        // Bump the shared counter past every other session's snapshot while
        // keeping our own snapshot in sync.
        seenSaveCount = getThreadSaveCount() + 1;
        threadSaveCount.set(seenSaveCount);
    }

    @Override
    public boolean needsRefresh(long secondsSinceLastAccess) {
        return getThreadSaveCount() != seenSaveCount;
    }

    @Override
    public void refreshed() {
        seenSaveCount = getThreadSaveCount();
    }

    private long getThreadSaveCount() {
        Long count = threadSaveCount.get();
        return count != null ? count : 0;
    }

    @Override
    public String toString() {
        return "Refresh after a save on the same thread from a different session";
    }
}
/**
 * Read-only RefreshStrategy responsible for notifying the SessionNamespaces
 * instance that a refresh was called
 */
private static class RefreshNamespaces implements RefreshStrategy {

    private final SessionNamespaces namespaces;

    public RefreshNamespaces(SessionNamespaces namespaces) {
        this.namespaces = namespaces;
    }

    @Override
    public boolean needsRefresh(long secondsSinceLastAccess) {
        // Never requests a refresh itself; it only reacts to one.
        return false;
    }

    @Override
    public void refreshed() {
        namespaces.onSessionRefresh();
    }
}
/** @return the session-local namespace mappings. */
public SessionNamespaces getNamespaces() {
    return namespaces;
}
}
|
apache/manifoldcf | 36,766 | connectors/ldap/connector/src/main/java/org/apache/manifoldcf/authorities/authorities/ldap/LDAPAuthority.java | /* $Id$ */
/**
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with this
* work for additional information regarding copyright ownership. The ASF
* licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
* WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
* License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.manifoldcf.authorities.authorities.ldap;
import java.io.*;
import java.util.*;
import javax.naming.*;
import javax.naming.directory.*;
import javax.naming.ldap.*;
import org.apache.manifoldcf.authorities.interfaces.*;
import org.apache.manifoldcf.authorities.system.Logging;
import org.apache.manifoldcf.core.interfaces.*;
import org.apache.manifoldcf.connectorcommon.interfaces.*;
import org.apache.manifoldcf.core.common.LDAPSSLSocketFactory;
import static org.apache.manifoldcf.connectorcommon.common.LdapEscaper.escapeDN;
import static org.apache.manifoldcf.connectorcommon.common.LdapEscaper.escapeFilter;
/**
 * This is the LDAP implementation of the IAuthorityConnector
 * interface. Access tokens for this connector are derived from LDAP
 * group (and optionally user) entries, except for the "global deny"
 * token, which is designed to allow the authority to shut off access
 * to all authorized documents when the user is unrecognized or the
 * LDAP server does not respond.
 */
public class LDAPAuthority extends org.apache.manifoldcf.authorities.authorities.BaseAuthorityConnector {
public static final String _rcsid = "@(#)$Id$";
/**
* Session information for all DC's we talk with.
*/
private LdapContext session = null;
private StartTlsResponse tls = null;
private long sessionExpirationTime = -1L;
//private ConfigParams parameters;
private String bindUser;
private String bindPass;
private String serverProtocol;
private String serverName;
private String serverPort;
private String serverBase;
private String userBase;
private String userSearch;
private String groupBase;
private String groupSearch;
private String groupNameAttr;
private boolean groupMemberDN;
private boolean addUserRecord;
private List<String> forcedTokens;
private String userNameAttr;
private String sslKeystoreData;
private IKeystoreManager sslKeystore;
private long responseLifetime = 60000L; //60sec
private int LRUsize = 1000;
/**
* Cache manager.
*/
private ICacheManager cacheManager = null;
/**
 * Constructor. All configuration happens later, in {@link #connect}.
 */
public LDAPAuthority() {
}
/**
 * Set thread context.
 */
@Override
public void setThreadContext(IThreadContext tc)
        throws ManifoldCFException {
    super.setThreadContext(tc);
    // The cache manager is thread-context bound and must be re-created here.
    cacheManager = CacheManagerFactory.make(tc);
}
/**
 * Connect. The configuration parameters are included.
 *
 * @param configParams are the configuration parameters for this connection.
 */
@Override
public void connect(ConfigParams configParams) {
    super.connect(configParams);

    // Credentials
    bindUser = configParams.getParameter("ldapBindUser");
    bindPass = configParams.getObfuscatedParameter("ldapBindPass");

    // We read all parameters up front; they are validated later in
    // getSession(), where missing values can be reported as exceptions.
    serverProtocol = configParams.getParameter("ldapProtocol");
    serverName = configParams.getParameter("ldapServerName");
    serverPort = configParams.getParameter("ldapServerPort");
    serverBase = configParams.getParameter("ldapServerBase");
    sslKeystoreData = configParams.getParameter("sslKeystore");

    userBase = configParams.getParameter("ldapUserBase");
    userSearch = configParams.getParameter("ldapUserSearch");
    groupBase = configParams.getParameter("ldapGroupBase");
    groupSearch = configParams.getParameter("ldapGroupSearch");
    groupNameAttr = configParams.getParameter("ldapGroupNameAttr");
    userNameAttr = configParams.getParameter("ldapUserNameAttr");
    groupMemberDN = "1".equals(getParam(configParams, "ldapGroupMemberDn", ""));
    addUserRecord = "1".equals(getParam(configParams, "ldapAddUserRecord", ""));

    // Collect the configured forced-access tokens.
    forcedTokens = new ArrayList<String>();
    for (int index = 0; index < configParams.getChildCount(); index++) {
        ConfigNode child = configParams.getChild(index);
        if (child.getType().equals("access")) {
            // Note: a missing attribute yields the literal string "null".
            forcedTokens.add("" + child.getAttributeValue("token"));
        }
    }
}
// All methods below this line will ONLY be called if a connect() call succeeded
// on this instance!

/**
 * Session setup. Anything that might need to throw an exception should go
 * here. Validates the configuration, establishes (or refreshes) the LDAP
 * connection, and negotiates TLS when configured.
 *
 * @return the established LDAP context
 * @throws ManifoldCFException if the configuration is incomplete or the
 *         connection cannot be established
 */
protected LdapContext getSession()
        throws ManifoldCFException {
    try {
        LDAPProtocolEnum ldapProtocol = retrieveLDAPProtocol();
        if (session == null) {
            // Validate required configuration before attempting to connect.
            if (serverName == null || serverName.length() == 0) {
                Logging.authorityConnectors.error("Server name parameter missing but required");
                throw new ManifoldCFException("Server name parameter missing but required");
            }
            if (serverPort == null || serverPort.length() == 0) {
                Logging.authorityConnectors.error("Server port parameter missing but required");
                throw new ManifoldCFException("Server port parameter missing but required");
            }
            if (serverBase == null) {
                Logging.authorityConnectors.error("Server base parameter missing but required");
                throw new ManifoldCFException("Server base parameter missing but required");
            }
            if (userBase == null) {
                Logging.authorityConnectors.error("User base parameter missing but required");
                throw new ManifoldCFException("User base parameter missing but required");
            }
            if (userSearch == null || userSearch.length() == 0) {
                Logging.authorityConnectors.error("User search expression missing but required");
                throw new ManifoldCFException("User search expression missing but required");
            }
            if (groupBase == null) {
                Logging.authorityConnectors.error("Group base parameter missing but required");
                throw new ManifoldCFException("Group base parameter missing but required");
            }
            if (groupSearch == null || groupSearch.length() == 0) {
                Logging.authorityConnectors.error("Group search expression missing but required");
                throw new ManifoldCFException("Group search expression missing but required");
            }
            if (groupNameAttr == null || groupNameAttr.length() == 0) {
                Logging.authorityConnectors.error("Group name attribute missing but required");
                throw new ManifoldCFException("Group name attribute missing but required");
            }
            if (userNameAttr == null || userNameAttr.length() == 0) {
                Logging.authorityConnectors.error("User name attribute missing but required");
                throw new ManifoldCFException("User name attribute missing but required");
            }

            if (sslKeystoreData != null) {
                sslKeystore = KeystoreManagerFactory.make("", sslKeystoreData);
            } else {
                sslKeystore = KeystoreManagerFactory.make("");
            }

            final Hashtable env = new Hashtable();
            env.put(Context.INITIAL_CONTEXT_FACTORY, "com.sun.jndi.ldap.LdapCtxFactory");
            env.put(Context.PROVIDER_URL, "ldap://" + serverName + ":" + serverPort + "/" + serverBase);

            // NOTE(review): this only enables SSL for plain LDAPS, not for
            // LDAPS+TLS, while the reconnect branch below uses isLDAPS() and
            // covers both -- confirm whether that asymmetry is intentional.
            if (LDAPProtocolEnum.LDAPS.equals(ldapProtocol)) {
                // Set thread local for keystore stuff
                LDAPSSLSocketFactory.setSocketFactoryProducer(sslKeystore);
                env.put(Context.SECURITY_PROTOCOL, "ssl");
                env.put("java.naming.ldap.factory.socket", "org.apache.manifoldcf.core.common.LDAPSSLSocketFactory");
            }

            if (bindUser != null && !bindUser.isEmpty()) {
                env.put(Context.SECURITY_AUTHENTICATION, "simple");
                env.put(Context.SECURITY_PRINCIPAL, bindUser);
                env.put(Context.SECURITY_CREDENTIALS, bindPass);
            }

            Logging.authorityConnectors.info("LDAP Context environment properties: " + printLdapContextEnvironment(env));
            session = new InitialLdapContext(env, null);

            if (isLDAPTLS(ldapProtocol)) {
                // Start TLS. Assign the *field* (the original code shadowed it
                // with a local variable) so disconnectSession() can close the
                // TLS response before closing the context.
                tls = (StartTlsResponse) session.extendedOperation(new StartTlsRequest());
                tls.negotiate(sslKeystore.getSecureSocketFactory());
            }
        } else {
            if (isLDAPS(ldapProtocol)) {
                // Set thread local for keystore stuff
                LDAPSSLSocketFactory.setSocketFactoryProducer(sslKeystore);
            }
            session.reconnect(null);
        }
        // Keep the session alive for another five minutes (see poll()).
        sessionExpirationTime = System.currentTimeMillis() + 300000L;
        return session;
    } catch (AuthenticationException e) {
        session = null;
        sessionExpirationTime = -1L;
        Logging.authorityConnectors.error("Authentication error: " + e.getMessage() + ", explanation: " + e.getExplanation(), e);
        throw new ManifoldCFException("Authentication error: " + e.getMessage() + ", explanation: " + e.getExplanation(), e);
    } catch (CommunicationException e) {
        session = null;
        sessionExpirationTime = -1L;
        Logging.authorityConnectors.error("Communication error: " + e.getMessage(), e);
        throw new ManifoldCFException("Communication error: " + e.getMessage(), e);
    } catch (NamingException e) {
        session = null;
        sessionExpirationTime = -1L;
        Logging.authorityConnectors.error("Naming exception: " + e.getMessage(), e);
        throw new ManifoldCFException("Naming exception: " + e.getMessage(), e);
    } catch (InterruptedIOException e) {
        session = null;
        sessionExpirationTime = -1L;
        Logging.authorityConnectors.error("Interrupted IO error: " + e.getMessage());
        throw new ManifoldCFException(e.getMessage(), ManifoldCFException.INTERRUPTED);
    } catch (IOException e) {
        session = null;
        sessionExpirationTime = -1L;
        Logging.authorityConnectors.error("IO error: " + e.getMessage(), e);
        throw new ManifoldCFException("IO error: " + e.getMessage(), e);
    }
}
/**
 * Retrieves LDAPProtocol from serverProtocol String
 *
 * @return LDAPProtocolEnum (defaults to plain LDAP for unset or
 *         unrecognized values)
 */
private LDAPProtocolEnum retrieveLDAPProtocol() {
    if (serverProtocol == null || serverProtocol.length() == 0) {
        return LDAPProtocolEnum.LDAP;
    }
    switch (serverProtocol.toUpperCase(Locale.ENGLISH)) {
        case "LDAPS":
            return LDAPProtocolEnum.LDAPS;
        case "LDAP+TLS":
            return LDAPProtocolEnum.LDAP_TLS;
        case "LDAPS+TLS":
            return LDAPProtocolEnum.LDAPS_TLS;
        case "LDAP":
        default:
            return LDAPProtocolEnum.LDAP;
    }
}
/**
 * Checks whether TLS is enabled for given LDAP Protocol
 *
 * @param ldapProtocol to check
 * @return whether TLS is enabled or not
 */
private boolean isLDAPTLS(LDAPProtocolEnum ldapProtocol) {
    // Enum identity comparison; behaves the same as equals() for enums.
    return ldapProtocol == LDAPProtocolEnum.LDAP_TLS
            || ldapProtocol == LDAPProtocolEnum.LDAPS_TLS;
}
/**
 * Checks whether LDAPS or LDAPS with TLS is enabled for given LDAP Protocol
 *
 * @param ldapProtocol to check
 * @return whether LDAPS or LDAPS with TLS is enabled or not
 */
private boolean isLDAPS(LDAPProtocolEnum ldapProtocol) {
    // Enum identity comparison; behaves the same as equals() for enums.
    return ldapProtocol == LDAPProtocolEnum.LDAPS
            || ldapProtocol == LDAPProtocolEnum.LDAPS_TLS;
}
/**
 * Check connection for sanity.
 */
@Override
public String check()
        throws ManifoldCFException {
    // Tear down and rebuild the session so the check exercises a fresh
    // connection rather than a possibly stale cached one.
    disconnectSession();
    getSession();
    // MHL for a real check of all the search etc.
    return super.check();
}
/** This method is called to assess whether to count this connector instance should
 * actually be counted as being connected.
 *
 * <p>Connectedness is defined purely by the presence of an LDAP context; expiry
 * is handled separately by {@link #poll()}.</p>
 *
 *@return true if the connector instance is actually connected.
 */
@Override
public boolean isConnected()
{
  return session != null;
}
/**
 * Poll. The connection should be closed if it has been idle for too long.
 *
 * <p>Compares the current time against the expiration stamp set when the session
 * was created; an expired session is torn down so the next use re-binds.</p>
 *
 * @throws ManifoldCFException passed through from the base class poll
 */
@Override
public void poll()
  throws ManifoldCFException {
  if (session != null && System.currentTimeMillis() > sessionExpirationTime) {
    disconnectSession();
  }
  super.poll();
}
/**
 * Disconnect the current LDAP session, if any.
 *
 * <p>Closes the StartTLS layer (when one was negotiated) and the underlying
 * LDAP context independently, then resets all session state so a later call
 * re-establishes the connection. Close failures are deliberately swallowed:
 * this runs on the teardown/expiry path where nothing useful can be done
 * about them.</p>
 */
protected void disconnectSession() {
  if (session != null) {
    // Close each layer in its own try block so that a failure closing the TLS
    // layer can no longer skip session.close() and leak the LDAP context
    // (the previous single try/catch aborted on the first IOException).
    if (tls != null) {
      try {
        tls.close();
      } catch (IOException e) {
        // Eat this error; best-effort teardown.
      }
    }
    try {
      session.close();
    } catch (NamingException e) {
      // Eat this error; best-effort teardown.
    }
    tls = null;
    session = null;
    sessionExpirationTime = -1L;
  }
}
/**
 * Close the connection. Call this before discarding the repository connector.
 *
 * <p>Tears down any live LDAP session and then nulls out every piece of
 * configuration-derived state so a stale value cannot leak into a future
 * connect with different parameters.</p>
 *
 * @throws ManifoldCFException passed through from the base class disconnect
 */
@Override
public void disconnect()
  throws ManifoldCFException {
  disconnectSession();
  super.disconnect();
  // Zero out all the stuff that we want to be sure we don't use again
  serverName = null;
  serverPort = null;
  serverBase = null;
  userBase = null;
  userSearch = null;
  groupBase = null;
  groupSearch = null;
  groupNameAttr = null;
  userNameAttr = null;
  forcedTokens = null;
  sslKeystoreData = null;
  sslKeystore = null;
}
/** Build the "host:port/base" string that identifies this connection in cache keys. */
protected String createCacheConnectionString() {
  return serverName + ":" + serverPort + "/" + serverBase;
}
/** Build a cache-key fragment capturing the complete user-search configuration. */
protected String createUserSearchString() {
  final char addRecordFlag = addUserRecord ? 'Y' : 'N';
  return userBase + "|" + userSearch + "|" + userNameAttr + "|" + addRecordFlag;
}
/** Build a cache-key fragment capturing the complete group-search configuration. */
protected String createGroupSearchString() {
  final char memberDnFlag = groupMemberDN ? 'Y' : 'N';
  return groupBase + "|" + groupSearch + "|" + groupNameAttr + "|" + memberDnFlag;
}
/**
 * Obtain the access tokens for a given user name.
 *
 * <p>Results are cached per (user, connection, user-search, group-search)
 * tuple for {@code responseLifetime} milliseconds; a cache miss falls through
 * to {@link #getAuthorizationResponseUncached(String)}.</p>
 *
 * @param userName is the user name or identifier.
 * @return the response tokens (according to the current authority). (Should
 * throws an exception only when a condition cannot be properly described
 * within the authorization response object.)
 */
@Override
public AuthorizationResponse getAuthorizationResponse(String userName)
  throws ManifoldCFException {
  getSession();
  // Construct a cache description object. The key includes the connection and
  // search configuration so different setups never share cached responses.
  ICacheDescription objectDescription = new LdapAuthorizationResponseDescription(userName,
    createCacheConnectionString(), createUserSearchString(), createGroupSearchString(), this.responseLifetime, this.LRUsize);
  // Enter the cache
  ICacheHandle ch = cacheManager.enterCache(new ICacheDescription[]{objectDescription}, null, null);
  try {
    // The create section serializes concurrent computations of the same key.
    ICacheCreateHandle createHandle = cacheManager.enterCreateSection(ch);
    try {
      // Lookup the object
      AuthorizationResponse response = (AuthorizationResponse) cacheManager.lookupObject(createHandle, objectDescription);
      if (response != null) {
        return response;
      }
      // Create the object.
      response = getAuthorizationResponseUncached(userName);
      // Save it in the cache
      cacheManager.saveObject(createHandle, objectDescription, response);
      // And return it...
      return response;
    } finally {
      // Enter/leave calls must balance even on exceptions, hence the nested finally blocks.
      cacheManager.leaveCreateSection(createHandle);
    }
  } finally {
    cacheManager.leaveCache(ch);
  }
}
/**
 * Compute the authorization tokens for a user directly against LDAP, bypassing the cache.
 *
 * <p>The user record is located via the configured user search. Its name attribute
 * (or, when {@code groupMemberDN} is set, its full DN) is substituted into the group
 * search filter, and every value of the configured group-name attribute on each
 * matching group becomes an access token, together with any forced tokens and,
 * optionally, the user-record attribute values themselves.</p>
 *
 * @param userName the user name or identifier; anything after an '@' is stripped
 * @return RESPONSE_USERNOTFOUND when no user record matches, RESPONSE_UNREACHABLE
 *         on LDAP failures, otherwise an OK response carrying the tokens
 * @throws ManifoldCFException when the session cannot be established or the user
 *         lookup itself fails
 */
protected AuthorizationResponse getAuthorizationResponseUncached(String userName)
  throws ManifoldCFException {
  getSession();
  try {
    // Find the user in the LDAP tree.
    SearchResult usrRecord = getUserEntry(session, userName);
    if (usrRecord == null) {
      return RESPONSE_USERNOTFOUND;
    }
    // Start from the statically configured forced tokens.
    List<String> theGroups = new ArrayList<String>(forcedTokens);
    String usrName = userName.split("@")[0];
    if (userNameAttr != null && !"".equals(userNameAttr)) {
      if (usrRecord.getAttributes() != null) {
        Attribute attr = usrRecord.getAttributes().get(userNameAttr);
        if (attr != null) {
          // Prefer the directory's canonical user name for group matching.
          usrName = attr.get().toString();
          if (addUserRecord) {
            // Optionally emit every value of the name attribute as a token.
            NamingEnumeration values = attr.getAll();
            while (values.hasMore()) {
              theGroups.add(values.next().toString());
            }
          }
        }
      }
    }
    if (groupSearch != null && !groupSearch.isEmpty()) {
      // Substitute the (filter-escaped) user identity into the group search filter.
      String searchFilter = groupSearch.replace("{0}", escapeFilter(groupMemberDN ? usrRecord.getNameInNamespace() : usrName));
      SearchControls searchCtls = new SearchControls();
      searchCtls.setSearchScope(SearchControls.SUBTREE_SCOPE);
      // Only the group-name attribute is needed from each match.
      searchCtls.setReturningAttributes(new String[]{groupNameAttr});
      NamingEnumeration answer = session.search(groupBase, searchFilter, searchCtls);
      while (answer.hasMoreElements()) {
        SearchResult sr = (SearchResult) answer.next();
        Attributes attrs = sr.getAttributes();
        if (attrs != null) {
          // Guard against groups that lack the name attribute entirely; the
          // previous code dereferenced attrs.get(...) unconditionally and
          // threw a NullPointerException for such entries.
          Attribute groupNames = attrs.get(groupNameAttr);
          if (groupNames != null) {
            NamingEnumeration values = groupNames.getAll();
            while (values.hasMore()) {
              theGroups.add(values.next().toString());
            }
          }
        }
      }
    }
    String[] tokens = theGroups.toArray(new String[0]);
    return new AuthorizationResponse(tokens, AuthorizationResponse.RESPONSE_OK);
  } catch (NameNotFoundException e) {
    // This means that the user doesn't exist.
    Logging.authorityConnectors.error("User does not exist: " + e.getMessage(), e);
    return RESPONSE_USERNOTFOUND;
  } catch (NamingException e) {
    // The directory could not be queried; report it as unreachable.
    Logging.authorityConnectors.error("Response Unreachable: " + e.getMessage(), e);
    return RESPONSE_UNREACHABLE;
  }
}
/**
 * Obtain the default access tokens for a given user name.
 *
 * <p>Used by the framework when {@code connect} fails; we cannot consult the
 * directory, so the user is reported as unreachable rather than unauthorized.</p>
 *
 * @param userName is the user name or identifier.
 * @return the default response tokens, presuming that the connect method
 * fails.
 */
@Override
public AuthorizationResponse getDefaultAuthorizationResponse(String userName) {
  // The default response if the getConnection method fails
  return RESPONSE_UNREACHABLE;
}
/**
 * Render an LDAP context environment for logging, masking any password.
 *
 * @param env LDAP context environment variable
 * @return stringified copy of the environment; the SECURITY_CREDENTIALS entry,
 *         when present, is replaced with asterisks so credentials never reach logs
 */
private String printLdapContextEnvironment(Hashtable env) {
  // Work on a copy so the live environment is never mutated.
  final Hashtable<Object, Object> masked = new Hashtable<>(env);
  if (masked.containsKey(Context.SECURITY_CREDENTIALS)) {
    masked.put(Context.SECURITY_CREDENTIALS, "********");
  }
  return Arrays.toString(masked.entrySet().toArray());
}
// UI support methods.
//
// These support methods are involved in setting up authority connection configuration information. The configuration methods cannot assume that the
// current authority object is connected. That is why they receive a thread context argument.
/**
 * Output the configuration header section. This method is called in the head
 * section of the connector's configuration page. Its purpose is to add the
 * required tabs to the list, and to output any javascript methods that might
 * be needed by the configuration editing HTML.
 *
 * @param threadContext is the local thread context.
 * @param out is the output to which any HTML should be sent.
 * @param parameters are the configuration parameters, as they currently
 * exist, for this connection being configured.
 * @param tabsArray is an array of tab names. Add to this array any tab names
 * that are specific to the connector.
 * @throws ManifoldCFException on configuration errors
 * @throws IOException on output errors
 */
@Override
public void outputConfigurationHeader(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, List<String> tabsArray)
  throws ManifoldCFException, IOException {
  tabsArray.add(Messages.getString(locale, "LDAP.LDAP"));
  tabsArray.add(Messages.getString(locale, "LDAP.ForcedTokens"));
  // Both tabs' values are placed in one velocity context so the shared JS can see everything.
  final Map<String,Object> paramMap = new HashMap<String,Object>();
  fillInLDAPTab(paramMap, out, parameters);
  fillInForcedTokensTab(paramMap, out, parameters);
  Messages.outputResourceWithVelocity(out, locale, "editConfiguration.js", paramMap);
}
/**
 * Output the configuration body section. This method is called in the body
 * section of the authority connector's configuration page. Its purpose is to
 * present the required form elements for editing. The coder can presume that
 * the HTML that is output from this configuration will be within appropriate
 * <html>, <body>, and <form> tags. The name of the form is "editconnection".
 *
 * @param threadContext is the local thread context.
 * @param out is the output to which any HTML should be sent.
 * @param parameters are the configuration parameters, as they currently
 * exist, for this connection being configured.
 * @param tabName is the current tab name.
 * @throws ManifoldCFException on configuration errors
 * @throws IOException on output errors
 */
@Override
public void outputConfigurationBody(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters, String tabName)
  throws ManifoldCFException, IOException {
  final Map<String,Object> paramMap = new HashMap<String,Object>();
  // The templates use TabName to decide which tab's markup is visible.
  paramMap.put("TabName",tabName);
  fillInLDAPTab(paramMap, out, parameters);
  fillInForcedTokensTab(paramMap, out, parameters);
  Messages.outputResourceWithVelocity(out, locale, "editConfiguration_LDAP.html", paramMap);
  Messages.outputResourceWithVelocity(out, locale, "editConfiguration_ForcedTokens.html", paramMap);
}
/**
 * Copy a single posted form value into the configuration.
 *
 * @return true when the parameter was present in the post and was copied
 */
private boolean copyParam(IPostParameters variableContext, ConfigParams parameters, String name) {
  final String value = variableContext.getParameter(name);
  if (value != null) {
    parameters.setParameter(name, value);
    return true;
  }
  return false;
}
/**
 * Copy a posted form value into the configuration, substituting a default
 * when the parameter is absent from the post (e.g. an unchecked checkbox).
 */
private void copyParam(IPostParameters variableContext, ConfigParams parameters, String name, String def) {
  final String posted = variableContext.getParameter(name);
  parameters.setParameter(name, posted != null ? posted : def);
}
/**
 * Process a configuration post. This method is called at the start of the
 * authority connector's configuration page, whenever there is a possibility
 * that form data for a connection has been posted. Its purpose is to gather
 * form information and modify the configuration parameters accordingly. The
 * name of the posted form is "editconnection".
 *
 * @param threadContext is the local thread context.
 * @param variableContext is the set of variables available from the post,
 * including binary file post information.
 * @param parameters are the configuration parameters, as they currently
 * exist, for this connection being configured.
 * @return null if all is well, or a string error message if there is an error
 * that should prevent saving of the connection (and cause a redirection to an
 * error page).
 */
@Override
public String processConfigurationPost(IThreadContext threadContext, IPostParameters variableContext, Locale locale, ConfigParams parameters)
  throws ManifoldCFException {
  // Simple string parameters: copied only when present in the post.
  copyParam(variableContext, parameters, "ldapProtocol");
  copyParam(variableContext, parameters, "ldapServerName");
  copyParam(variableContext, parameters, "ldapServerPort");
  copyParam(variableContext, parameters, "ldapServerBase");
  copyParam(variableContext, parameters, "ldapUserBase");
  copyParam(variableContext, parameters, "ldapUserSearch");
  copyParam(variableContext, parameters, "ldapUserNameAttr");
  copyParam(variableContext, parameters, "ldapGroupBase");
  copyParam(variableContext, parameters, "ldapGroupSearch");
  copyParam(variableContext, parameters, "ldapGroupNameAttr");
  // Checkboxes post nothing when unchecked, so force a "0" default.
  copyParam(variableContext, parameters, "ldapGroupMemberDn", "0"); //checkbox boolean value
  copyParam(variableContext, parameters, "ldapAddUserRecord", "0"); //checkbox boolean value
  copyParam(variableContext, parameters, "ldapBindUser");
  // The bind password is stored obfuscated, never in clear text.
  final String bindPass = variableContext.getParameter("ldapBindPass");
  if (bindPass != null) {
    parameters.setObfuscatedParameter("ldapBindPass", variableContext.mapKeyToPassword(bindPass));
  }
  // Forced-token list: present only when the ForcedTokens tab posted.
  final String xc = variableContext.getParameter("tokencount");
  if (xc != null) {
    // Delete all tokens first
    int i = 0;
    while (i < parameters.getChildCount()) {
      ConfigNode sn = parameters.getChild(i);
      if (sn.getType().equals("access")) {
        parameters.removeChild(i);
      } else {
        i++;
      }
    }
    // Re-add every posted token row, skipping rows marked for deletion.
    final int accessCount = Integer.parseInt(xc);
    i = 0;
    while (i < accessCount) {
      final String accessDescription = "_" + Integer.toString(i);
      final String accessOpName = "accessop" + accessDescription;
      final String command = variableContext.getParameter(accessOpName);
      if (command != null && command.equals("Delete")) {
        // Next row
        i++;
        continue;
      }
      // Get the stuff we need
      String accessSpec = variableContext.getParameter("spectoken" + accessDescription);
      ConfigNode node = new ConfigNode("access");
      node.setAttribute("token", accessSpec);
      parameters.addChild(parameters.getChildCount(), node);
      i++;
    }
    // A pending "Add" in the entry row appends one more token.
    String op = variableContext.getParameter("accessop");
    if (op != null && op.equals("Add")) {
      String accessspec = variableContext.getParameter("spectoken");
      ConfigNode node = new ConfigNode("access");
      node.setAttribute("token", accessspec);
      parameters.addChild(parameters.getChildCount(), node);
    }
  }
  // SSL keystore management: delete or import a certificate in the stored keystore.
  String sslKeystoreValue = variableContext.getParameter("sslkeystoredata");
  final String sslConfigOp = variableContext.getParameter("sslconfigop");
  if (sslConfigOp != null)
  {
    if (sslConfigOp.equals("Delete"))
    {
      final String alias = variableContext.getParameter("sslkeystorealias");
      final IKeystoreManager mgr;
      if (sslKeystoreValue != null)
        mgr = KeystoreManagerFactory.make("",sslKeystoreValue);
      else
        mgr = KeystoreManagerFactory.make("");
      mgr.remove(alias);
      sslKeystoreValue = mgr.getString();
    }
    else if (sslConfigOp.equals("Add"))
    {
      // Import the uploaded certificate under a freshly generated alias.
      String alias = IDFactory.make(threadContext);
      byte[] certificateValue = variableContext.getBinaryBytes("sslcertificate");
      final IKeystoreManager mgr;
      if (sslKeystoreValue != null)
        mgr = KeystoreManagerFactory.make("",sslKeystoreValue);
      else
        mgr = KeystoreManagerFactory.make("");
      java.io.InputStream is = new java.io.ByteArrayInputStream(certificateValue);
      String certError = null;
      try
      {
        mgr.importCertificate(alias,is);
      }
      catch (Throwable e)
      {
        // Remember the failure so it can be reported after cleanup.
        certError = e.getMessage();
      }
      finally
      {
        try
        {
          is.close();
        }
        catch (IOException e)
        {
          // Eat this exception
        }
      }
      if (certError != null)
      {
        // Reject the save; the user is redirected to an error page.
        return "Illegal certificate: "+certError;
      }
      sslKeystoreValue = mgr.getString();
    }
  }
  if (sslKeystoreValue != null)
    parameters.setParameter("sslkeystore",sslKeystoreValue);
  return null;
}
/**
 * View configuration. This method is called in the body section of the
 * authority connector's view configuration page. Its purpose is to present
 * the connection information to the user. The coder can presume that the HTML
 * that is output from this configuration will be within appropriate <html>
 * and <body> tags.
 *
 * @param threadContext is the local thread context.
 * @param out is the output to which any HTML should be sent.
 * @param parameters are the configuration parameters, as they currently
 * exist, for this connection being configured.
 * @throws ManifoldCFException on configuration errors
 * @throws IOException on output errors
 */
@Override
public void viewConfiguration(IThreadContext threadContext, IHTTPOutput out, Locale locale, ConfigParams parameters)
  throws ManifoldCFException, IOException {
  // Reuse the same tab fill-in helpers as the edit page; the view template is read-only.
  final Map<String,Object> paramMap = new HashMap<String,Object>();
  fillInLDAPTab(paramMap, out, parameters);
  fillInForcedTokensTab(paramMap, out, parameters);
  Messages.outputResourceWithVelocity(out, locale, "viewConfiguration.html", paramMap);
}
// Protected methods
/** Read a configuration parameter, falling back to the supplied default when unset. */
private static String getParam(final ConfigParams parameters, final String name, final String def) {
  final String value = parameters.getParameter(name);
  if (value == null) {
    return def;
  }
  return value;
}
/**
 * Fill in the ForcedTokens tab: collect the token of every "access" child node
 * into the velocity context under FORCEDTOKENS.
 */
protected static void fillInForcedTokensTab(Map<String,Object> velocityContext, IHTTPOutput out, ConfigParams parameters)
{
  final List<String> forcedTokenList = new ArrayList<String>();
  int i = 0;
  while (i < parameters.getChildCount()) {
    final ConfigNode child = parameters.getChild(i);
    if (child.getType().equals("access")) {
      forcedTokenList.add(child.getAttributeValue("token"));
    }
    i++;
  }
  velocityContext.put("FORCEDTOKENS", forcedTokenList);
}
/** Fill in LDAP tab.
 *
 * <p>Publishes every LDAP configuration parameter (with its UI default) into the
 * velocity context, plus the obfuscated bind password and, when a keystore is
 * configured, a map of its certificates for display.</p>
 */
protected static void fillInLDAPTab(Map<String,Object> velocityContext, IHTTPOutput out, ConfigParams parameters)
{
  velocityContext.put("FSERVERPROTOCOL", getParam(parameters, "ldapProtocol", "ldap"));
  velocityContext.put("FSERVERNAME", getParam(parameters, "ldapServerName", ""));
  velocityContext.put("FSERVERPORT", getParam(parameters, "ldapServerPort", "389"));
  velocityContext.put("FSERVERBASE", getParam(parameters, "ldapServerBase", ""));
  String sslKeystoreData = parameters.getParameter("sslkeystore");
  if (sslKeystoreData != null)
    velocityContext.put("SSLKEYSTOREDATA", sslKeystoreData);
  velocityContext.put("FUSERBASE", getParam(parameters, "ldapUserBase", "ou=People"));
  velocityContext.put("FUSERSEARCH", getParam(parameters, "ldapUserSearch", "(&(objectClass=inetOrgPerson)(uid={0}))"));
  velocityContext.put("FUSERNAMEATTR", getParam(parameters, "ldapUserNameAttr", "uid"));
  velocityContext.put("FADDUSERRECORD", getParam(parameters, "ldapAddUserRecord", ""));
  velocityContext.put("FGROUPBASE", getParam(parameters, "ldapGroupBase", "ou=Groups"));
  velocityContext.put("FGROUPSEARCH", getParam(parameters, "ldapGroupSearch", "(&(objectClass=groupOfNames)(member={0}))"));
  velocityContext.put("FGROUPNAMEATTR", getParam(parameters, "ldapGroupNameAttr", "cn"));
  velocityContext.put("FGROUPMEMBERDN", getParam(parameters, "ldapGroupMemberDn", ""));
  velocityContext.put("FBINDUSER", getParam(parameters, "ldapBindUser", ""));
  // Never expose the real password: map it through the output's password keying.
  String fBindPass = parameters.getObfuscatedParameter("ldapBindPass");
  if (fBindPass == null)
    fBindPass = "";
  else
    fBindPass = out.mapPasswordToKey(fBindPass);
  velocityContext.put("FBINDPASS", fBindPass);
  // List the keystore certificates (alias -> truncated description) for the template.
  Map<String,String> sslCertificatesMap = null;
  String message = null;
  try {
    final IKeystoreManager localSslKeystore;
    if (sslKeystoreData == null)
      localSslKeystore = KeystoreManagerFactory.make("");
    else
      localSslKeystore = KeystoreManagerFactory.make("",sslKeystoreData);
    // List the individual certificates in the store, with a delete button for each
    final String[] contents = localSslKeystore.getContents();
    if (contents.length > 0)
    {
      sslCertificatesMap = new HashMap<>();
      for (final String alias : contents)
      {
        String description = localSslKeystore.getDescription(alias);
        if (description.length() > 128)
          description = description.substring(0,125) + "...";
        sslCertificatesMap.put(alias, description);
      }
    }
  } catch (ManifoldCFException e) {
    // Keystore problems are reported on the page rather than failing the render.
    message = e.getMessage();
    Logging.authorityConnectors.warn(e);
  }
  if(sslCertificatesMap != null)
    velocityContext.put("SSLCERTIFICATESMAP", sslCertificatesMap);
  if(message != null)
    velocityContext.put("MESSAGE", message);
}
/**
 * Obtain the user LDAP record for a given user logon name.
 *
 * @param ctx is the ldap context to use.
 * @param userName (Domain Logon Name) is the user name or identifier; anything
 *        after an '@' is stripped before substitution into the search filter.
 * @return the first SearchResult matching the configured user search, or null
 *         when no entry matches.
 * @throws ManifoldCFException when the LDAP search fails.
 */
protected SearchResult getUserEntry(LdapContext ctx, String userName)
  throws ManifoldCFException {
  // Use literal String.replace, not replaceAll: replaceAll treats '\' and '$'
  // in the replacement as regex escapes/group references, so an escaped name
  // containing backslashes (escapeDN output for special characters) would
  // corrupt the filter or throw.
  // NOTE(review): the value is escaped with escapeDN but is substituted into a
  // search *filter*; escapeFilter (as used for the group search) looks like the
  // correct escaping here -- confirm before changing.
  String searchFilter = userSearch.replace("{0}", escapeDN(userName.split("@")[0]));
  SearchControls searchCtls = new SearchControls();
  searchCtls.setSearchScope(SearchControls.SUBTREE_SCOPE);
  try {
    NamingEnumeration answer = ctx.search(userBase, searchFilter, searchCtls);
    try {
      if (answer.hasMoreElements()) {
        return (SearchResult) answer.next();
      }
      return null;
    } finally {
      // Release the enumeration's server-side resources; failures here are
      // ignored because the lookup result has already been determined.
      try {
        answer.close();
      } catch (NamingException e) {
        // Best-effort cleanup.
      }
    }
  } catch (Exception e) {
    throw new ManifoldCFException(e.getMessage(), e);
  }
}
protected static StringSet emptyStringSet = new StringSet();
/**
 * Cache object descriptor for access tokens produced by this connector.
 *
 * <p>Two descriptions are equal exactly when they name the same user against the
 * same connection string, user-search, and group-search configuration, so cached
 * responses are never shared across differing setups.</p>
 */
protected class LdapAuthorizationResponseDescription extends org.apache.manifoldcf.core.cachemanager.BaseDescription {
  /** The user name. */
  protected String userName;
  /** LDAP connection string with server name and base DN. */
  protected String connectionString;
  /** User search definition. */
  protected String userSearch;
  /** Group search definition. */
  protected String groupSearch;
  /** The response lifetime. */
  protected long responseLifetime;
  /** The expiration time; computed lazily on first use. */
  protected long expirationTime = -1;

  /** Constructor. */
  public LdapAuthorizationResponseDescription(String userName, String connectionString, String userSearch, String groupSearch, long responseLifetime, int LRUsize) {
    super("LDAPAuthority", LRUsize);
    this.userName = userName;
    this.connectionString = connectionString;
    this.userSearch = userSearch;
    this.groupSearch = groupSearch;
    this.responseLifetime = responseLifetime;
  }

  /** Return the invalidation keys for this object; there are none. */
  public StringSet getObjectKeys() {
    return emptyStringSet;
  }

  /** Get the critical section name, used for synchronizing the creation of the object. */
  public String getCriticalSectionName() {
    return getClass().getName() + "-" + userName + "-" + connectionString;
  }

  /** Return the object expiration interval. */
  @Override
  public long getObjectExpirationTime(long currentTime) {
    if (expirationTime == -1) {
      expirationTime = currentTime + responseLifetime;
    }
    return expirationTime;
  }

  @Override
  public int hashCode() {
    return userName.hashCode() + connectionString.hashCode() + userSearch.hashCode() + groupSearch.hashCode();
  }

  @Override
  public boolean equals(Object o) {
    if (!(o instanceof LdapAuthorizationResponseDescription)) {
      return false;
    }
    final LdapAuthorizationResponseDescription other = (LdapAuthorizationResponseDescription) o;
    return other.userName.equals(userName)
      && other.connectionString.equals(connectionString)
      && other.userSearch.equals(userSearch)
      && other.groupSearch.equals(groupSearch);
  }
}
}
|
googleapis/google-cloud-java | 36,914 | java-dataproc-metastore/proto-google-cloud-dataproc-metastore-v1alpha/src/main/java/com/google/cloud/metastore/v1alpha/KerberosConfig.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/metastore/v1alpha/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.metastore.v1alpha;
/**
*
*
* <pre>
* Configuration information for a Kerberos principal.
* </pre>
*
* Protobuf type {@code google.cloud.metastore.v1alpha.KerberosConfig}
*/
public final class KerberosConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.metastore.v1alpha.KerberosConfig)
KerberosConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use KerberosConfig.newBuilder() to construct.
// Generated code (protoc): builder-based constructor used by Builder.build().
// Use KerberosConfig.newBuilder() to construct.
private KerberosConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Generated code: no-arg constructor initializing string fields to their proto3 defaults.
private KerberosConfig() {
  principal_ = "";
  krb5ConfigGcsUri_ = "";
}

// Generated code: reflective instantiation hook used by the protobuf runtime.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new KerberosConfig();
}
// Generated code: returns the message descriptor defined in metastore.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.metastore.v1alpha.MetastoreProto
    .internal_static_google_cloud_metastore_v1alpha_KerberosConfig_descriptor;
}

// Generated code: binds reflective field access to this message and its Builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.metastore.v1alpha.MetastoreProto
    .internal_static_google_cloud_metastore_v1alpha_KerberosConfig_fieldAccessorTable
    .ensureFieldAccessorsInitialized(
      com.google.cloud.metastore.v1alpha.KerberosConfig.class,
      com.google.cloud.metastore.v1alpha.KerberosConfig.Builder.class);
}
// Generated code: bit 0 of bitField0_ tracks whether the keytab message field is set.
private int bitField0_;

public static final int KEYTAB_FIELD_NUMBER = 1;
private com.google.cloud.metastore.v1alpha.Secret keytab_;

/**
 *
 *
 * <pre>
 * A Kerberos keytab file that can be used to authenticate a service principal
 * with a Kerberos Key Distribution Center (KDC).
 * </pre>
 *
 * <code>.google.cloud.metastore.v1alpha.Secret keytab = 1;</code>
 *
 * @return Whether the keytab field is set.
 */
@java.lang.Override
public boolean hasKeytab() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * A Kerberos keytab file that can be used to authenticate a service principal
 * with a Kerberos Key Distribution Center (KDC).
 * </pre>
 *
 * <code>.google.cloud.metastore.v1alpha.Secret keytab = 1;</code>
 *
 * @return The keytab.
 */
@java.lang.Override
public com.google.cloud.metastore.v1alpha.Secret getKeytab() {
  // Generated code: unset message fields yield the default instance, never null.
  return keytab_ == null
      ? com.google.cloud.metastore.v1alpha.Secret.getDefaultInstance()
      : keytab_;
}
/**
 *
 *
 * <pre>
 * A Kerberos keytab file that can be used to authenticate a service principal
 * with a Kerberos Key Distribution Center (KDC).
 * </pre>
 *
 * <code>.google.cloud.metastore.v1alpha.Secret keytab = 1;</code>
 */
@java.lang.Override
public com.google.cloud.metastore.v1alpha.SecretOrBuilder getKeytabOrBuilder() {
  // Generated code: on an immutable message this is identical to getKeytab().
  return keytab_ == null
      ? com.google.cloud.metastore.v1alpha.Secret.getDefaultInstance()
      : keytab_;
}
public static final int PRINCIPAL_FIELD_NUMBER = 2;

// Generated code: holds either a String or a ByteString; decoded lazily below.
@SuppressWarnings("serial")
private volatile java.lang.Object principal_ = "";

/**
 *
 *
 * <pre>
 * A Kerberos principal that exists in the both the keytab the KDC
 * to authenticate as. A typical principal is of the form
 * `primary/instance@REALM`, but there is no exact format.
 * </pre>
 *
 * <code>string principal = 2;</code>
 *
 * @return The principal.
 */
@java.lang.Override
public java.lang.String getPrincipal() {
  java.lang.Object ref = principal_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the UTF-8 bytes and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    principal_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * A Kerberos principal that exists in the both the keytab the KDC
 * to authenticate as. A typical principal is of the form
 * `primary/instance@REALM`, but there is no exact format.
 * </pre>
 *
 * <code>string principal = 2;</code>
 *
 * @return The bytes for principal.
 */
@java.lang.Override
public com.google.protobuf.ByteString getPrincipalBytes() {
  java.lang.Object ref = principal_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String to UTF-8 bytes and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    principal_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
public static final int KRB5_CONFIG_GCS_URI_FIELD_NUMBER = 3;

// Generated code: holds either a String or a ByteString; decoded lazily below.
@SuppressWarnings("serial")
private volatile java.lang.Object krb5ConfigGcsUri_ = "";

/**
 *
 *
 * <pre>
 * A Cloud Storage URI that specifies the path to a
 * krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
 * although the file does not need to be named krb5.conf explicitly.
 * </pre>
 *
 * <code>string krb5_config_gcs_uri = 3;</code>
 *
 * @return The krb5ConfigGcsUri.
 */
@java.lang.Override
public java.lang.String getKrb5ConfigGcsUri() {
  java.lang.Object ref = krb5ConfigGcsUri_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the UTF-8 bytes and cache the String.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    krb5ConfigGcsUri_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * A Cloud Storage URI that specifies the path to a
 * krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
 * although the file does not need to be named krb5.conf explicitly.
 * </pre>
 *
 * <code>string krb5_config_gcs_uri = 3;</code>
 *
 * @return The bytes for krb5ConfigGcsUri.
 */
@java.lang.Override
public com.google.protobuf.ByteString getKrb5ConfigGcsUriBytes() {
  java.lang.Object ref = krb5ConfigGcsUri_;
  if (ref instanceof java.lang.String) {
    // Encode the cached String to UTF-8 bytes and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    krb5ConfigGcsUri_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Generated code: -1 = not computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

// Generated code: no required fields in this message, so always initialized once memoized.
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Generated code: serialize set fields in field-number order.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1 (keytab): written only when the has-bit is set.
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getKeytab());
  }
  // Proto3 string fields are omitted when empty.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(principal_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, principal_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(krb5ConfigGcsUri_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 3, krb5ConfigGcsUri_);
  }
  getUnknownFields().writeTo(output);
}
// Generated code: compute (and memoize) the wire size, mirroring writeTo's field logic.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getKeytab());
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(principal_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, principal_);
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(krb5ConfigGcsUri_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, krb5ConfigGcsUri_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Generated code: field-by-field equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.metastore.v1alpha.KerberosConfig)) {
    return super.equals(obj);
  }
  com.google.cloud.metastore.v1alpha.KerberosConfig other =
      (com.google.cloud.metastore.v1alpha.KerberosConfig) obj;

  // keytab is compared only when set on both sides.
  if (hasKeytab() != other.hasKeytab()) return false;
  if (hasKeytab()) {
    if (!getKeytab().equals(other.getKeytab())) return false;
  }
  if (!getPrincipal().equals(other.getPrincipal())) return false;
  if (!getKrb5ConfigGcsUri().equals(other.getKrb5ConfigGcsUri())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Generated code: memoized hash over descriptor, set fields, and unknown fields,
// consistent with equals() above.
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasKeytab()) {
    hash = (37 * hash) + KEYTAB_FIELD_NUMBER;
    hash = (53 * hash) + getKeytab().hashCode();
  }
  hash = (37 * hash) + PRINCIPAL_FIELD_NUMBER;
  hash = (53 * hash) + getPrincipal().hashCode();
  hash = (37 * hash) + KRB5_CONFIG_GCS_URI_FIELD_NUMBER;
  hash = (53 * hash) + getKrb5ConfigGcsUri().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Generated code: standard parseFrom/parseDelimitedFrom overload family, all
// delegating to PARSER (directly for in-memory data, via the GeneratedMessageV3
// IO helpers for streams).
public static com.google.cloud.metastore.v1alpha.KerberosConfig parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.metastore.v1alpha.KerberosConfig parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.metastore.v1alpha.KerberosConfig parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.metastore.v1alpha.KerberosConfig parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.metastore.v1alpha.KerberosConfig parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.metastore.v1alpha.KerberosConfig parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.metastore.v1alpha.KerberosConfig parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.metastore.v1alpha.KerberosConfig parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.metastore.v1alpha.KerberosConfig parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.metastore.v1alpha.KerberosConfig parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.metastore.v1alpha.KerberosConfig parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.metastore.v1alpha.KerberosConfig parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.metastore.v1alpha.KerberosConfig prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Configuration information for a Kerberos principal.
* </pre>
*
* Protobuf type {@code google.cloud.metastore.v1alpha.KerberosConfig}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.metastore.v1alpha.KerberosConfig)
com.google.cloud.metastore.v1alpha.KerberosConfigOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.metastore.v1alpha.MetastoreProto
.internal_static_google_cloud_metastore_v1alpha_KerberosConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.metastore.v1alpha.MetastoreProto
.internal_static_google_cloud_metastore_v1alpha_KerberosConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.metastore.v1alpha.KerberosConfig.class,
com.google.cloud.metastore.v1alpha.KerberosConfig.Builder.class);
}
// Construct using com.google.cloud.metastore.v1alpha.KerberosConfig.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getKeytabFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
keytab_ = null;
if (keytabBuilder_ != null) {
keytabBuilder_.dispose();
keytabBuilder_ = null;
}
principal_ = "";
krb5ConfigGcsUri_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.metastore.v1alpha.MetastoreProto
.internal_static_google_cloud_metastore_v1alpha_KerberosConfig_descriptor;
}
@java.lang.Override
public com.google.cloud.metastore.v1alpha.KerberosConfig getDefaultInstanceForType() {
return com.google.cloud.metastore.v1alpha.KerberosConfig.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.metastore.v1alpha.KerberosConfig build() {
com.google.cloud.metastore.v1alpha.KerberosConfig result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.metastore.v1alpha.KerberosConfig buildPartial() {
com.google.cloud.metastore.v1alpha.KerberosConfig result =
new com.google.cloud.metastore.v1alpha.KerberosConfig(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.metastore.v1alpha.KerberosConfig result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.keytab_ = keytabBuilder_ == null ? keytab_ : keytabBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.principal_ = principal_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.krb5ConfigGcsUri_ = krb5ConfigGcsUri_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.metastore.v1alpha.KerberosConfig) {
return mergeFrom((com.google.cloud.metastore.v1alpha.KerberosConfig) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.metastore.v1alpha.KerberosConfig other) {
if (other == com.google.cloud.metastore.v1alpha.KerberosConfig.getDefaultInstance())
return this;
if (other.hasKeytab()) {
mergeKeytab(other.getKeytab());
}
if (!other.getPrincipal().isEmpty()) {
principal_ = other.principal_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getKrb5ConfigGcsUri().isEmpty()) {
krb5ConfigGcsUri_ = other.krb5ConfigGcsUri_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getKeytabFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
principal_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
krb5ConfigGcsUri_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.cloud.metastore.v1alpha.Secret keytab_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.metastore.v1alpha.Secret,
com.google.cloud.metastore.v1alpha.Secret.Builder,
com.google.cloud.metastore.v1alpha.SecretOrBuilder>
keytabBuilder_;
/**
*
*
* <pre>
* A Kerberos keytab file that can be used to authenticate a service principal
* with a Kerberos Key Distribution Center (KDC).
* </pre>
*
* <code>.google.cloud.metastore.v1alpha.Secret keytab = 1;</code>
*
* @return Whether the keytab field is set.
*/
public boolean hasKeytab() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* A Kerberos keytab file that can be used to authenticate a service principal
* with a Kerberos Key Distribution Center (KDC).
* </pre>
*
* <code>.google.cloud.metastore.v1alpha.Secret keytab = 1;</code>
*
* @return The keytab.
*/
public com.google.cloud.metastore.v1alpha.Secret getKeytab() {
if (keytabBuilder_ == null) {
return keytab_ == null
? com.google.cloud.metastore.v1alpha.Secret.getDefaultInstance()
: keytab_;
} else {
return keytabBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* A Kerberos keytab file that can be used to authenticate a service principal
* with a Kerberos Key Distribution Center (KDC).
* </pre>
*
* <code>.google.cloud.metastore.v1alpha.Secret keytab = 1;</code>
*/
public Builder setKeytab(com.google.cloud.metastore.v1alpha.Secret value) {
if (keytabBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
keytab_ = value;
} else {
keytabBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* A Kerberos keytab file that can be used to authenticate a service principal
* with a Kerberos Key Distribution Center (KDC).
* </pre>
*
* <code>.google.cloud.metastore.v1alpha.Secret keytab = 1;</code>
*/
public Builder setKeytab(com.google.cloud.metastore.v1alpha.Secret.Builder builderForValue) {
if (keytabBuilder_ == null) {
keytab_ = builderForValue.build();
} else {
keytabBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* A Kerberos keytab file that can be used to authenticate a service principal
* with a Kerberos Key Distribution Center (KDC).
* </pre>
*
* <code>.google.cloud.metastore.v1alpha.Secret keytab = 1;</code>
*/
public Builder mergeKeytab(com.google.cloud.metastore.v1alpha.Secret value) {
if (keytabBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& keytab_ != null
&& keytab_ != com.google.cloud.metastore.v1alpha.Secret.getDefaultInstance()) {
getKeytabBuilder().mergeFrom(value);
} else {
keytab_ = value;
}
} else {
keytabBuilder_.mergeFrom(value);
}
if (keytab_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* A Kerberos keytab file that can be used to authenticate a service principal
* with a Kerberos Key Distribution Center (KDC).
* </pre>
*
* <code>.google.cloud.metastore.v1alpha.Secret keytab = 1;</code>
*/
public Builder clearKeytab() {
bitField0_ = (bitField0_ & ~0x00000001);
keytab_ = null;
if (keytabBuilder_ != null) {
keytabBuilder_.dispose();
keytabBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* A Kerberos keytab file that can be used to authenticate a service principal
* with a Kerberos Key Distribution Center (KDC).
* </pre>
*
* <code>.google.cloud.metastore.v1alpha.Secret keytab = 1;</code>
*/
public com.google.cloud.metastore.v1alpha.Secret.Builder getKeytabBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getKeytabFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* A Kerberos keytab file that can be used to authenticate a service principal
* with a Kerberos Key Distribution Center (KDC).
* </pre>
*
* <code>.google.cloud.metastore.v1alpha.Secret keytab = 1;</code>
*/
public com.google.cloud.metastore.v1alpha.SecretOrBuilder getKeytabOrBuilder() {
if (keytabBuilder_ != null) {
return keytabBuilder_.getMessageOrBuilder();
} else {
return keytab_ == null
? com.google.cloud.metastore.v1alpha.Secret.getDefaultInstance()
: keytab_;
}
}
/**
*
*
* <pre>
* A Kerberos keytab file that can be used to authenticate a service principal
* with a Kerberos Key Distribution Center (KDC).
* </pre>
*
* <code>.google.cloud.metastore.v1alpha.Secret keytab = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.metastore.v1alpha.Secret,
com.google.cloud.metastore.v1alpha.Secret.Builder,
com.google.cloud.metastore.v1alpha.SecretOrBuilder>
getKeytabFieldBuilder() {
if (keytabBuilder_ == null) {
keytabBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.metastore.v1alpha.Secret,
com.google.cloud.metastore.v1alpha.Secret.Builder,
com.google.cloud.metastore.v1alpha.SecretOrBuilder>(
getKeytab(), getParentForChildren(), isClean());
keytab_ = null;
}
return keytabBuilder_;
}
private java.lang.Object principal_ = "";
/**
*
*
* <pre>
* A Kerberos principal that exists in the both the keytab the KDC
* to authenticate as. A typical principal is of the form
* `primary/instance@REALM`, but there is no exact format.
* </pre>
*
* <code>string principal = 2;</code>
*
* @return The principal.
*/
public java.lang.String getPrincipal() {
java.lang.Object ref = principal_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
principal_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A Kerberos principal that exists in the both the keytab the KDC
* to authenticate as. A typical principal is of the form
* `primary/instance@REALM`, but there is no exact format.
* </pre>
*
* <code>string principal = 2;</code>
*
* @return The bytes for principal.
*/
public com.google.protobuf.ByteString getPrincipalBytes() {
java.lang.Object ref = principal_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
principal_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A Kerberos principal that exists in the both the keytab the KDC
* to authenticate as. A typical principal is of the form
* `primary/instance@REALM`, but there is no exact format.
* </pre>
*
* <code>string principal = 2;</code>
*
* @param value The principal to set.
* @return This builder for chaining.
*/
public Builder setPrincipal(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
principal_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A Kerberos principal that exists in the both the keytab the KDC
* to authenticate as. A typical principal is of the form
* `primary/instance@REALM`, but there is no exact format.
* </pre>
*
* <code>string principal = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPrincipal() {
principal_ = getDefaultInstance().getPrincipal();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A Kerberos principal that exists in the both the keytab the KDC
* to authenticate as. A typical principal is of the form
* `primary/instance@REALM`, but there is no exact format.
* </pre>
*
* <code>string principal = 2;</code>
*
* @param value The bytes for principal to set.
* @return This builder for chaining.
*/
public Builder setPrincipalBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
principal_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object krb5ConfigGcsUri_ = "";
/**
*
*
* <pre>
* A Cloud Storage URI that specifies the path to a
* krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
* although the file does not need to be named krb5.conf explicitly.
* </pre>
*
* <code>string krb5_config_gcs_uri = 3;</code>
*
* @return The krb5ConfigGcsUri.
*/
public java.lang.String getKrb5ConfigGcsUri() {
java.lang.Object ref = krb5ConfigGcsUri_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
krb5ConfigGcsUri_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A Cloud Storage URI that specifies the path to a
* krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
* although the file does not need to be named krb5.conf explicitly.
* </pre>
*
* <code>string krb5_config_gcs_uri = 3;</code>
*
* @return The bytes for krb5ConfigGcsUri.
*/
public com.google.protobuf.ByteString getKrb5ConfigGcsUriBytes() {
java.lang.Object ref = krb5ConfigGcsUri_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
krb5ConfigGcsUri_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A Cloud Storage URI that specifies the path to a
* krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
* although the file does not need to be named krb5.conf explicitly.
* </pre>
*
* <code>string krb5_config_gcs_uri = 3;</code>
*
* @param value The krb5ConfigGcsUri to set.
* @return This builder for chaining.
*/
public Builder setKrb5ConfigGcsUri(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
krb5ConfigGcsUri_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A Cloud Storage URI that specifies the path to a
* krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
* although the file does not need to be named krb5.conf explicitly.
* </pre>
*
* <code>string krb5_config_gcs_uri = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearKrb5ConfigGcsUri() {
krb5ConfigGcsUri_ = getDefaultInstance().getKrb5ConfigGcsUri();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* A Cloud Storage URI that specifies the path to a
* krb5.conf file. It is of the form `gs://{bucket_name}/path/to/krb5.conf`,
* although the file does not need to be named krb5.conf explicitly.
* </pre>
*
* <code>string krb5_config_gcs_uri = 3;</code>
*
* @param value The bytes for krb5ConfigGcsUri to set.
* @return This builder for chaining.
*/
public Builder setKrb5ConfigGcsUriBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
krb5ConfigGcsUri_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.metastore.v1alpha.KerberosConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.metastore.v1alpha.KerberosConfig)
private static final com.google.cloud.metastore.v1alpha.KerberosConfig DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.metastore.v1alpha.KerberosConfig();
}
public static com.google.cloud.metastore.v1alpha.KerberosConfig getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<KerberosConfig> PARSER =
new com.google.protobuf.AbstractParser<KerberosConfig>() {
@java.lang.Override
public KerberosConfig parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<KerberosConfig> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<KerberosConfig> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.metastore.v1alpha.KerberosConfig getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,923 | java-discoveryengine/proto-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/UserInfo.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1/common.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1;
/**
*
*
* <pre>
* Information of an end user.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.UserInfo}
*/
public final class UserInfo extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.UserInfo)
UserInfoOrBuilder {
private static final long serialVersionUID = 0L;
// Use UserInfo.newBuilder() to construct.
private UserInfo(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UserInfo() {
userId_ = "";
userAgent_ = "";
timeZone_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UserInfo();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.CommonProto
.internal_static_google_cloud_discoveryengine_v1_UserInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.CommonProto
.internal_static_google_cloud_discoveryengine_v1_UserInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.UserInfo.class,
com.google.cloud.discoveryengine.v1.UserInfo.Builder.class);
}
public static final int USER_ID_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object userId_ = "";
/**
*
*
* <pre>
* Highly recommended for logged-in users. Unique identifier for logged-in
* user, such as a user name. Don't set for anonymous users.
*
* Always use a hashed value for this ID.
*
* Don't set the field to the same fixed ID for different users. This mixes
* the event history of those users together, which results in degraded
* model quality.
*
* The field must be a UTF-8 encoded string with a length limit of 128
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string user_id = 1;</code>
*
* @return The userId.
*/
@java.lang.Override
// Accessors below use the generated lazy String/ByteString caching pattern:
// the field holds either form and is converted (and cached) on first access.
public java.lang.String getUserId() {
java.lang.Object ref = userId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
userId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Highly recommended for logged-in users. Unique identifier for logged-in
* user, such as a user name. Don't set for anonymous users.
*
* Always use a hashed value for this ID.
*
* Don't set the field to the same fixed ID for different users. This mixes
* the event history of those users together, which results in degraded
* model quality.
*
* The field must be a UTF-8 encoded string with a length limit of 128
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string user_id = 1;</code>
*
* @return The bytes for userId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getUserIdBytes() {
java.lang.Object ref = userId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
userId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int USER_AGENT_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object userAgent_ = "";
/**
*
*
* <pre>
* User agent as included in the HTTP header.
*
* The field must be a UTF-8 encoded string with a length limit of 1,000
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
*
* This should not be set when using the client side event reporting with
* GTM or JavaScript tag in
* [UserEventService.CollectUserEvent][google.cloud.discoveryengine.v1.UserEventService.CollectUserEvent]
* or if
* [UserEvent.direct_user_request][google.cloud.discoveryengine.v1.UserEvent.direct_user_request]
* is set.
* </pre>
*
* <code>string user_agent = 2;</code>
*
* @return The userAgent.
*/
@java.lang.Override
public java.lang.String getUserAgent() {
java.lang.Object ref = userAgent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
userAgent_ = s;
return s;
}
}
/**
*
*
* <pre>
* User agent as included in the HTTP header.
*
* The field must be a UTF-8 encoded string with a length limit of 1,000
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
*
* This should not be set when using the client side event reporting with
* GTM or JavaScript tag in
* [UserEventService.CollectUserEvent][google.cloud.discoveryengine.v1.UserEventService.CollectUserEvent]
* or if
* [UserEvent.direct_user_request][google.cloud.discoveryengine.v1.UserEvent.direct_user_request]
* is set.
* </pre>
*
* <code>string user_agent = 2;</code>
*
* @return The bytes for userAgent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getUserAgentBytes() {
java.lang.Object ref = userAgent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
userAgent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int TIME_ZONE_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object timeZone_ = "";
/**
*
*
* <pre>
* Optional. IANA time zone, e.g. Europe/Budapest.
* </pre>
*
* <code>string time_zone = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The timeZone.
*/
@java.lang.Override
public java.lang.String getTimeZone() {
java.lang.Object ref = timeZone_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
timeZone_ = s;
return s;
}
}
/**
*
*
* <pre>
* Optional. IANA time zone, e.g. Europe/Budapest.
* </pre>
*
* <code>string time_zone = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for timeZone.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTimeZoneBytes() {
java.lang.Object ref = timeZone_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
timeZone_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
// Serializes only non-empty string fields, matching proto3 default-value
// omission on the wire; unknown fields from parsing are re-emitted.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(userId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, userId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(userAgent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, userAgent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(timeZone_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, timeZone_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
// Memoized size computation; must stay in lockstep with writeTo above.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(userId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, userId_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(userAgent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, userAgent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(timeZone_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, timeZone_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1.UserInfo)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1.UserInfo other =
(com.google.cloud.discoveryengine.v1.UserInfo) obj;
if (!getUserId().equals(other.getUserId())) return false;
if (!getUserAgent().equals(other.getUserAgent())) return false;
if (!getTimeZone().equals(other.getTimeZone())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
// Lazily computes and caches the hash; field order mirrors equals() above.
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + USER_ID_FIELD_NUMBER;
hash = (53 * hash) + getUserId().hashCode();
hash = (37 * hash) + USER_AGENT_FIELD_NUMBER;
hash = (53 * hash) + getUserAgent().hashCode();
hash = (37 * hash) + TIME_ZONE_FIELD_NUMBER;
hash = (53 * hash) + getTimeZone().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// --- Standard generated parse entry points delegating to the shared PARSER. ---
public static com.google.cloud.discoveryengine.v1.UserInfo parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.UserInfo parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.UserInfo parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.UserInfo parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.UserInfo parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.UserInfo parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.UserInfo parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.UserInfo parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.UserInfo parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.UserInfo parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.UserInfo parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.UserInfo parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.discoveryengine.v1.UserInfo prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Information of an end user.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.UserInfo}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.UserInfo)
com.google.cloud.discoveryengine.v1.UserInfoOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.CommonProto
.internal_static_google_cloud_discoveryengine_v1_UserInfo_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.CommonProto
.internal_static_google_cloud_discoveryengine_v1_UserInfo_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.UserInfo.class,
com.google.cloud.discoveryengine.v1.UserInfo.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1.UserInfo.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
userId_ = "";
userAgent_ = "";
timeZone_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1.CommonProto
.internal_static_google_cloud_discoveryengine_v1_UserInfo_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.UserInfo getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1.UserInfo.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.UserInfo build() {
com.google.cloud.discoveryengine.v1.UserInfo result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.UserInfo buildPartial() {
com.google.cloud.discoveryengine.v1.UserInfo result =
new com.google.cloud.discoveryengine.v1.UserInfo(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.discoveryengine.v1.UserInfo result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.userId_ = userId_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.userAgent_ = userAgent_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.timeZone_ = timeZone_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1.UserInfo) {
return mergeFrom((com.google.cloud.discoveryengine.v1.UserInfo) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.discoveryengine.v1.UserInfo other) {
if (other == com.google.cloud.discoveryengine.v1.UserInfo.getDefaultInstance()) return this;
if (!other.getUserId().isEmpty()) {
userId_ = other.userId_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getUserAgent().isEmpty()) {
userAgent_ = other.userAgent_;
bitField0_ |= 0x00000002;
onChanged();
}
if (!other.getTimeZone().isEmpty()) {
timeZone_ = other.timeZone_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
userId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
userAgent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
timeZone_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object userId_ = "";
/**
*
*
* <pre>
* Highly recommended for logged-in users. Unique identifier for logged-in
* user, such as a user name. Don't set for anonymous users.
*
* Always use a hashed value for this ID.
*
* Don't set the field to the same fixed ID for different users. This mixes
* the event history of those users together, which results in degraded
* model quality.
*
* The field must be a UTF-8 encoded string with a length limit of 128
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string user_id = 1;</code>
*
* @return The userId.
*/
public java.lang.String getUserId() {
java.lang.Object ref = userId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
userId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Highly recommended for logged-in users. Unique identifier for logged-in
* user, such as a user name. Don't set for anonymous users.
*
* Always use a hashed value for this ID.
*
* Don't set the field to the same fixed ID for different users. This mixes
* the event history of those users together, which results in degraded
* model quality.
*
* The field must be a UTF-8 encoded string with a length limit of 128
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string user_id = 1;</code>
*
* @return The bytes for userId.
*/
public com.google.protobuf.ByteString getUserIdBytes() {
java.lang.Object ref = userId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
userId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Highly recommended for logged-in users. Unique identifier for logged-in
* user, such as a user name. Don't set for anonymous users.
*
* Always use a hashed value for this ID.
*
* Don't set the field to the same fixed ID for different users. This mixes
* the event history of those users together, which results in degraded
* model quality.
*
* The field must be a UTF-8 encoded string with a length limit of 128
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string user_id = 1;</code>
*
* @param value The userId to set.
* @return This builder for chaining.
*/
public Builder setUserId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
userId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Highly recommended for logged-in users. Unique identifier for logged-in
* user, such as a user name. Don't set for anonymous users.
*
* Always use a hashed value for this ID.
*
* Don't set the field to the same fixed ID for different users. This mixes
* the event history of those users together, which results in degraded
* model quality.
*
* The field must be a UTF-8 encoded string with a length limit of 128
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string user_id = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearUserId() {
userId_ = getDefaultInstance().getUserId();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Highly recommended for logged-in users. Unique identifier for logged-in
* user, such as a user name. Don't set for anonymous users.
*
* Always use a hashed value for this ID.
*
* Don't set the field to the same fixed ID for different users. This mixes
* the event history of those users together, which results in degraded
* model quality.
*
* The field must be a UTF-8 encoded string with a length limit of 128
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string user_id = 1;</code>
*
* @param value The bytes for userId to set.
* @return This builder for chaining.
*/
public Builder setUserIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
userId_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object userAgent_ = "";
/**
*
*
* <pre>
* User agent as included in the HTTP header.
*
* The field must be a UTF-8 encoded string with a length limit of 1,000
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
*
* This should not be set when using the client side event reporting with
* GTM or JavaScript tag in
* [UserEventService.CollectUserEvent][google.cloud.discoveryengine.v1.UserEventService.CollectUserEvent]
* or if
* [UserEvent.direct_user_request][google.cloud.discoveryengine.v1.UserEvent.direct_user_request]
* is set.
* </pre>
*
* <code>string user_agent = 2;</code>
*
* @return The userAgent.
*/
public java.lang.String getUserAgent() {
java.lang.Object ref = userAgent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
userAgent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* User agent as included in the HTTP header.
*
* The field must be a UTF-8 encoded string with a length limit of 1,000
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
*
* This should not be set when using the client side event reporting with
* GTM or JavaScript tag in
* [UserEventService.CollectUserEvent][google.cloud.discoveryengine.v1.UserEventService.CollectUserEvent]
* or if
* [UserEvent.direct_user_request][google.cloud.discoveryengine.v1.UserEvent.direct_user_request]
* is set.
* </pre>
*
* <code>string user_agent = 2;</code>
*
* @return The bytes for userAgent.
*/
public com.google.protobuf.ByteString getUserAgentBytes() {
java.lang.Object ref = userAgent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
userAgent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* User agent as included in the HTTP header.
*
* The field must be a UTF-8 encoded string with a length limit of 1,000
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
*
* This should not be set when using the client side event reporting with
* GTM or JavaScript tag in
* [UserEventService.CollectUserEvent][google.cloud.discoveryengine.v1.UserEventService.CollectUserEvent]
* or if
* [UserEvent.direct_user_request][google.cloud.discoveryengine.v1.UserEvent.direct_user_request]
* is set.
* </pre>
*
* <code>string user_agent = 2;</code>
*
* @param value The userAgent to set.
* @return This builder for chaining.
*/
public Builder setUserAgent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
userAgent_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* User agent as included in the HTTP header.
*
* The field must be a UTF-8 encoded string with a length limit of 1,000
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
*
* This should not be set when using the client side event reporting with
* GTM or JavaScript tag in
* [UserEventService.CollectUserEvent][google.cloud.discoveryengine.v1.UserEventService.CollectUserEvent]
* or if
* [UserEvent.direct_user_request][google.cloud.discoveryengine.v1.UserEvent.direct_user_request]
* is set.
* </pre>
*
* <code>string user_agent = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearUserAgent() {
userAgent_ = getDefaultInstance().getUserAgent();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* User agent as included in the HTTP header.
*
* The field must be a UTF-8 encoded string with a length limit of 1,000
* characters. Otherwise, an `INVALID_ARGUMENT` error is returned.
*
* This should not be set when using the client side event reporting with
* GTM or JavaScript tag in
* [UserEventService.CollectUserEvent][google.cloud.discoveryengine.v1.UserEventService.CollectUserEvent]
* or if
* [UserEvent.direct_user_request][google.cloud.discoveryengine.v1.UserEvent.direct_user_request]
* is set.
* </pre>
*
* <code>string user_agent = 2;</code>
*
* @param value The bytes for userAgent to set.
* @return This builder for chaining.
*/
public Builder setUserAgentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
userAgent_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private java.lang.Object timeZone_ = "";
/**
*
*
* <pre>
* Optional. IANA time zone, e.g. Europe/Budapest.
* </pre>
*
* <code>string time_zone = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The timeZone.
*/
public java.lang.String getTimeZone() {
java.lang.Object ref = timeZone_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
timeZone_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Optional. IANA time zone, e.g. Europe/Budapest.
* </pre>
*
* <code>string time_zone = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return The bytes for timeZone.
*/
public com.google.protobuf.ByteString getTimeZoneBytes() {
java.lang.Object ref = timeZone_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
timeZone_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Optional. IANA time zone, e.g. Europe/Budapest.
* </pre>
*
* <code>string time_zone = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The timeZone to set.
* @return This builder for chaining.
*/
public Builder setTimeZone(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
timeZone_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. IANA time zone, e.g. Europe/Budapest.
* </pre>
*
* <code>string time_zone = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @return This builder for chaining.
*/
public Builder clearTimeZone() {
timeZone_ = getDefaultInstance().getTimeZone();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. IANA time zone, e.g. Europe/Budapest.
* </pre>
*
* <code>string time_zone = 3 [(.google.api.field_behavior) = OPTIONAL];</code>
*
* @param value The bytes for timeZone to set.
* @return This builder for chaining.
*/
public Builder setTimeZoneBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
timeZone_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.UserInfo)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.UserInfo)
private static final com.google.cloud.discoveryengine.v1.UserInfo DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.UserInfo();
}
public static com.google.cloud.discoveryengine.v1.UserInfo getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<UserInfo> PARSER =
new com.google.protobuf.AbstractParser<UserInfo>() {
@java.lang.Override
public UserInfo parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<UserInfo> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UserInfo> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.UserInfo getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/dubbo | 37,074 | dubbo-config/dubbo-config-api/src/main/java/org/apache/dubbo/config/ReferenceConfig.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.dubbo.config;
import org.apache.dubbo.common.URL;
import org.apache.dubbo.common.Version;
import org.apache.dubbo.common.constants.CommonConstants;
import org.apache.dubbo.common.constants.LoggerCodeConstants;
import org.apache.dubbo.common.constants.RegistryConstants;
import org.apache.dubbo.common.extension.ExtensionLoader;
import org.apache.dubbo.common.logger.ErrorTypeAwareLogger;
import org.apache.dubbo.common.logger.LoggerFactory;
import org.apache.dubbo.common.url.component.ServiceConfigURL;
import org.apache.dubbo.common.utils.ArrayUtils;
import org.apache.dubbo.common.utils.CollectionUtils;
import org.apache.dubbo.common.utils.ConfigUtils;
import org.apache.dubbo.common.utils.NetUtils;
import org.apache.dubbo.common.utils.StringUtils;
import org.apache.dubbo.common.utils.UrlUtils;
import org.apache.dubbo.config.annotation.Reference;
import org.apache.dubbo.config.support.Parameter;
import org.apache.dubbo.config.utils.ConfigValidationUtils;
import org.apache.dubbo.registry.client.metadata.MetadataUtils;
import org.apache.dubbo.rpc.Invoker;
import org.apache.dubbo.rpc.Protocol;
import org.apache.dubbo.rpc.ProxyFactory;
import org.apache.dubbo.rpc.cluster.Cluster;
import org.apache.dubbo.rpc.cluster.directory.StaticDirectory;
import org.apache.dubbo.rpc.cluster.support.ClusterUtils;
import org.apache.dubbo.rpc.cluster.support.registry.ZoneAwareCluster;
import org.apache.dubbo.rpc.model.AsyncMethodInfo;
import org.apache.dubbo.rpc.model.ConsumerModel;
import org.apache.dubbo.rpc.model.DubboStub;
import org.apache.dubbo.rpc.model.ModuleModel;
import org.apache.dubbo.rpc.model.ModuleServiceRepository;
import org.apache.dubbo.rpc.model.ScopeModel;
import org.apache.dubbo.rpc.model.ServiceDescriptor;
import org.apache.dubbo.rpc.protocol.injvm.InjvmProtocol;
import org.apache.dubbo.rpc.service.GenericService;
import org.apache.dubbo.rpc.stub.StubSuppliers;
import org.apache.dubbo.rpc.support.ProtocolUtils;
import java.beans.Transient;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
import java.util.concurrent.locks.ReentrantLock;
import static org.apache.dubbo.common.constants.CommonConstants.ANY_VALUE;
import static org.apache.dubbo.common.constants.CommonConstants.CLUSTER_DOMAIN;
import static org.apache.dubbo.common.constants.CommonConstants.CLUSTER_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.COMMA_SEPARATOR;
import static org.apache.dubbo.common.constants.CommonConstants.COMMA_SEPARATOR_CHAR;
import static org.apache.dubbo.common.constants.CommonConstants.CONSUMER_SIDE;
import static org.apache.dubbo.common.constants.CommonConstants.DEFAULT_CLUSTER_DOMAIN;
import static org.apache.dubbo.common.constants.CommonConstants.DEFAULT_MESH_PORT;
import static org.apache.dubbo.common.constants.CommonConstants.DubboProperty.DUBBO_IP_TO_REGISTRY;
import static org.apache.dubbo.common.constants.CommonConstants.INTERFACE_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.LOCALHOST_VALUE;
import static org.apache.dubbo.common.constants.CommonConstants.MESH_ENABLE;
import static org.apache.dubbo.common.constants.CommonConstants.METHODS_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.MONITOR_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.POD_NAMESPACE;
import static org.apache.dubbo.common.constants.CommonConstants.PROXY_CLASS_REF;
import static org.apache.dubbo.common.constants.CommonConstants.REVISION_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.SEMICOLON_SPLIT_PATTERN;
import static org.apache.dubbo.common.constants.CommonConstants.SIDE_KEY;
import static org.apache.dubbo.common.constants.CommonConstants.SVC;
import static org.apache.dubbo.common.constants.CommonConstants.TRIPLE;
import static org.apache.dubbo.common.constants.CommonConstants.UNLOAD_CLUSTER_RELATED;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CLUSTER_NO_VALID_PROVIDER;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_FAILED_DESTROY_INVOKER;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_FAILED_LOAD_ENV_VARIABLE;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_NO_METHOD_FOUND;
import static org.apache.dubbo.common.constants.LoggerCodeConstants.CONFIG_PROPERTY_CONFLICT;
import static org.apache.dubbo.common.constants.RegistryConstants.PROVIDED_BY;
import static org.apache.dubbo.common.constants.RegistryConstants.SUBSCRIBED_SERVICE_NAMES_KEY;
import static org.apache.dubbo.common.utils.NetUtils.isInvalidLocalHost;
import static org.apache.dubbo.common.utils.StringUtils.splitToSet;
import static org.apache.dubbo.registry.Constants.CONSUMER_PROTOCOL;
import static org.apache.dubbo.registry.Constants.REGISTER_IP_KEY;
import static org.apache.dubbo.rpc.Constants.GENERIC_KEY;
import static org.apache.dubbo.rpc.Constants.LOCAL_PROTOCOL;
import static org.apache.dubbo.rpc.cluster.Constants.PEER_KEY;
import static org.apache.dubbo.rpc.cluster.Constants.REFER_KEY;
/**
* Please avoid using this class for any new application,
* use {@link ReferenceConfigBase} instead.
*/
public class ReferenceConfig<T> extends ReferenceConfigBase<T> {
// Error-code-aware logger for this class. NOTE(review): public visibility is
// kept for backward compatibility — treat as part of the public API.
public static final ErrorTypeAwareLogger logger = LoggerFactory.getErrorTypeAwareLogger(ReferenceConfig.class);

/**
 * The {@link Protocol} implementation with adaptive functionality,it will be different in different scenarios.
 * A particular {@link Protocol} implementation is determined by the protocol attribute in the {@link URL}.
 * For example:
 *
 * <li>when the url is registry://224.5.6.7:1234/org.apache.dubbo.registry.RegistryService?application=dubbo-sample,
 * then the protocol is <b>RegistryProtocol</b></li>
 *
 * <li>when the url is dubbo://224.5.6.7:1234/org.apache.dubbo.config.api.DemoService?application=dubbo-sample, then
 * the protocol is <b>DubboProtocol</b></li>
 * <p>
 * Actually,when the {@link ExtensionLoader} init the {@link Protocol} instants,it will automatically wrap three
 * layers, and eventually will get a <b>ProtocolSerializationWrapper</b> or <b>ProtocolFilterWrapper</b> or <b>ProtocolListenerWrapper</b>
 */
private Protocol protocolSPI;

/**
 * A {@link ProxyFactory} implementation that will generate a reference service's proxy,the JavassistProxyFactory is
 * its default implementation
 */
private ProxyFactory proxyFactory;

// Consumer-side model registered with the ModuleServiceRepository during
// init() and unregistered on destroy()/failure cleanup.
private ConsumerModel consumerModel;

/**
 * The interface proxy reference
 */
private transient volatile T ref;

/**
 * The invoker of the reference service
 */
private transient volatile Invoker<?> invoker;

/**
 * The flag whether the ReferenceConfig has been initialized
 */
private transient volatile boolean initialized;

/**
 * whether this ReferenceConfig has been destroyed
 */
private transient volatile boolean destroyed;

/**
 * The service names that the Dubbo interface subscribed.
 *
 * @since 2.7.8
 */
private String services;

// Guards init()/destroy() so concurrent callers see a consistent
// initialized/destroyed/ref/invoker state.
protected final transient ReentrantLock lock = new ReentrantLock();
/** Creates a reference bound to the default application/module model. */
public ReferenceConfig() {
    super();
}

/** Creates a reference bound to the given module model. */
public ReferenceConfig(ModuleModel moduleModel) {
    super(moduleModel);
}

/** Creates a reference configured from a {@link Reference} annotation. */
public ReferenceConfig(Reference reference) {
    super(reference);
}

/** Creates a reference from a {@link Reference} annotation within the given module model. */
public ReferenceConfig(ModuleModel moduleModel, Reference reference) {
    super(moduleModel, reference);
}
@Override
protected void postProcessAfterScopeModelChanged(ScopeModel oldScopeModel, ScopeModel newScopeModel) {
    super.postProcessAfterScopeModelChanged(oldScopeModel, newScopeModel);
    // Re-resolve the adaptive Protocol/ProxyFactory SPI instances from the new
    // scope model, since extension loaders are scoped per model.
    protocolSPI = this.getExtensionLoader(Protocol.class).getAdaptiveExtension();
    proxyFactory = this.getExtensionLoader(ProxyFactory.class).getAdaptiveExtension();
}
/**
 * Get a string presenting the service names that the Dubbo interface subscribed.
 * If it is a multiple-values, the content will be a comma-delimited String.
 *
 * @return non-null
 * @see RegistryConstants#SUBSCRIBED_SERVICE_NAMES_KEY
 * @since 2.7.8
 */
@Deprecated
@Parameter(key = SUBSCRIBED_SERVICE_NAMES_KEY)
public String getServices() {
    return services;
}

/**
 * It's an alias method for {@link #getServices()}, but the more convenient.
 *
 * @return the String {@link Set} presenting the Dubbo interface subscribed
 * @since 2.7.8
 */
@Deprecated
@Parameter(excluded = true)
public Set<String> getSubscribedServices() {
    // Splits the comma-delimited 'services' string; empty/null input yields an empty set.
    return splitToSet(getServices(), COMMA_SEPARATOR_CHAR);
}

/**
 * Set the service names that the Dubbo interface subscribed.
 *
 * @param services If it is a multiple-values, the content will be a comma-delimited String.
 * @since 2.7.8
 */
public void setServices(String services) {
    this.services = services;
}
@Override
@Transient
public T get(boolean check) {
    // A destroyed reference can never be revived.
    if (destroyed) {
        throw new IllegalStateException("The invoker of ReferenceConfig(" + url + ") has already destroyed!");
    }
    // Fast path: proxy already created by a previous call.
    if (ref != null) {
        return ref;
    }
    if (getScopeModel().isLifeCycleManagedExternally()) {
        // Externally managed lifecycle: only prepare the model for reference.
        getScopeModel().getDeployer().prepare();
    } else {
        // Legacy API usage: make sure the module is started before init.
        getScopeModel().getDeployer().start();
    }
    init(check);
    return ref;
}
/**
 * Re-checks provider availability for an already-initialized reference.
 * On failure the partially-built state is cleaned up and the error rethrown.
 */
@Override
public void checkOrDestroy(long timeout) {
    boolean usable = initialized && ref != null;
    if (!usable) {
        // Nothing to check before init() has produced a proxy.
        return;
    }
    try {
        checkInvokerAvailable(timeout);
    } catch (Throwable t) {
        logAndCleanup(t);
        throw t;
    }
}
/**
 * Cleans up partially-initialized state after a failed init or availability
 * check: destroys the invoker, unregisters the consumer model, and resets all
 * cached references so a later init() can retry. The triggering throwable
 * {@code t} is not swallowed — callers rethrow it after this method returns.
 *
 * @param t the failure that triggered cleanup (never null)
 */
private void logAndCleanup(Throwable t) {
    try {
        if (invoker != null) {
            invoker.destroy();
        }
    } catch (Throwable destroyError) {
        // Fix: previously the original throwable 't' was logged here, silently
        // discarding the destroy failure itself. Log the destroy failure and
        // keep it attached to 't' for callers that rethrow.
        t.addSuppressed(destroyError);
        logger.warn(
                CONFIG_FAILED_DESTROY_INVOKER,
                "",
                "",
                "Unexpected error occurred when destroy invoker of ReferenceConfig(" + url + ").",
                destroyError);
    }
    if (consumerModel != null) {
        ModuleServiceRepository repository = getScopeModel().getServiceRepository();
        repository.unregisterConsumer(consumerModel);
    }
    // Reset to a pristine, re-initializable state.
    initialized = false;
    invoker = null;
    ref = null;
    consumerModel = null;
    serviceMetadata.setTarget(null);
    serviceMetadata.getAttributeMap().remove(PROXY_CLASS_REF);
    // Thrown by checkInvokerAvailable(). Guard against a null message, which
    // previously would have thrown NullPointerException from contains().
    String message = t.getMessage();
    if (t.getClass() == IllegalStateException.class
            && message != null
            && message.contains("No provider available for the service")) {
        // 2-2 - No provider available.
        logger.error(CLUSTER_NO_VALID_PROVIDER, "server crashed", "", "No provider available.", t);
    }
}
/**
 * Idempotently destroys this reference: tears down the invoker, drops the
 * proxy, and unregisters the consumer model. Serialized with init() via
 * {@code lock}; once destroyed, get() throws and the reference cannot be reused.
 */
@Override
public void destroy() {
    lock.lock();
    try {
        super.destroy();
        // Already destroyed by a previous call — nothing left to release.
        if (destroyed) {
            return;
        }
        destroyed = true;
        try {
            if (invoker != null) {
                invoker.destroy();
            }
        } catch (Throwable t) {
            // Best-effort teardown: log the destroy failure but keep releasing
            // the remaining resources below.
            logger.warn(
                    CONFIG_FAILED_DESTROY_INVOKER,
                    "",
                    "",
                    "Unexpected error occurred when destroy invoker of ReferenceConfig(" + url + ").",
                    t);
        }
        invoker = null;
        ref = null;
        if (consumerModel != null) {
            ModuleServiceRepository repository = getScopeModel().getServiceRepository();
            repository.unregisterConsumer(consumerModel);
        }
    } finally {
        lock.unlock();
    }
}
/** Initializes the reference with provider-availability checking enabled; see {@code init(boolean)}. */
protected void init() {
    init(true);
}
/**
 * Builds the service reference: refreshes this config if needed, registers the service
 * descriptor and consumer model, creates the proxy, and optionally verifies that at least
 * one provider is available. On any failure the partially-built state is rolled back via
 * logAndCleanup() and the original error is rethrown. Serialized under {@code lock} and
 * idempotent once fully initialized.
 *
 * @param check whether to fail fast when no provider is available (see checkInvokerAvailable)
 */
protected void init(boolean check) {
    lock.lock();
    try {
        if (initialized && ref != null) {
            // Already fully initialized by a previous call.
            return;
        }
        try {
            if (!this.isRefreshed()) {
                this.refresh();
            }
            // auto detect proxy type: fall back to the native stub proxy for DubboStub interfaces
            String proxyType = getProxy();
            if (StringUtils.isBlank(proxyType) && DubboStub.class.isAssignableFrom(interfaceClass)) {
                setProxy(CommonConstants.NATIVE_STUB);
            }
            // init serviceMetadata
            initServiceMetadata(consumer);
            serviceMetadata.setServiceType(getServiceInterfaceClass());
            // TODO, uncomment this line once service key is unified
            serviceMetadata.generateServiceKey();
            Map<String, String> referenceParameters = appendConfig();
            ModuleServiceRepository repository = getScopeModel().getServiceRepository();
            ServiceDescriptor serviceDescriptor;
            if (CommonConstants.NATIVE_STUB.equals(getProxy())) {
                // Stub services carry their own descriptor; adopt its interface name.
                serviceDescriptor = StubSuppliers.getServiceDescriptor(interfaceName);
                repository.registerService(serviceDescriptor);
                setInterface(serviceDescriptor.getInterfaceName());
            } else {
                serviceDescriptor = repository.registerService(interfaceClass);
            }
            consumerModel = new ConsumerModel(
                    serviceMetadata.getServiceKey(),
                    proxy,
                    serviceDescriptor,
                    getScopeModel(),
                    serviceMetadata,
                    createAsyncMethodInfo(),
                    interfaceClassLoader);
            // Compatible with dependencies on ServiceModel#getReferenceConfig() , and will be removed in a future
            // version.
            consumerModel.setConfig(this);
            repository.registerConsumer(consumerModel);
            serviceMetadata.getAttachments().putAll(referenceParameters);
            // Build the actual proxy and publish it through the metadata holders.
            ref = createProxy(referenceParameters);
            serviceMetadata.setTarget(ref);
            serviceMetadata.addAttribute(PROXY_CLASS_REF, ref);
            consumerModel.setDestroyRunner(getDestroyRunner());
            consumerModel.setProxyObject(ref);
            consumerModel.initMethodModels();
            if (check) {
                checkInvokerAvailable(0);
            }
        } catch (Throwable t) {
            // Roll back partially-created state and surface the original failure.
            logAndCleanup(t);
            throw t;
        }
        initialized = true;
    } finally {
        lock.unlock();
    }
}
/**
 * Converts each configured {@link MethodConfig} into its async-call metadata.
 *
 * @return mapping from method name to {@link AsyncMethodInfo}; {@code null} when no
 *         method configs are present (preserving this method's historical contract)
 */
private Map<String, AsyncMethodInfo> createAsyncMethodInfo() {
    // Negated isNotEmpty keeps exactly the original guard semantics.
    if (!CollectionUtils.isNotEmpty(getMethods())) {
        return null;
    }
    Map<String, AsyncMethodInfo> asyncInfoByMethod = new HashMap<>(16);
    for (MethodConfig method : getMethods()) {
        AsyncMethodInfo info = method.convertMethodConfig2AsyncInfo();
        if (info != null) {
            asyncInfoByMethod.put(method.getName(), info);
        }
    }
    return asyncInfoByMethod;
}
/**
 * Append all configuration required for service reference.
 * Collects interface/side/runtime parameters, method metadata, application/module/consumer
 * level settings and the register ip into a single parameter map.
 *
 * @return reference parameters
 */
private Map<String, String> appendConfig() {
    Map<String, String> map = new HashMap<>(16);
    map.put(INTERFACE_KEY, interfaceName);
    map.put(SIDE_KEY, CONSUMER_SIDE);
    ReferenceConfigBase.appendRuntimeParameters(map);
    if (!ProtocolUtils.isGeneric(generic)) {
        // Non-generic references advertise the interface revision and method list.
        String revision = Version.getVersion(interfaceClass, version);
        if (StringUtils.isNotEmpty(revision)) {
            map.put(REVISION_KEY, revision);
        }
        String[] methods = methods(interfaceClass);
        if (methods.length == 0) {
            logger.warn(
                    CONFIG_NO_METHOD_FOUND,
                    "",
                    "",
                    "No method found in service interface: " + interfaceClass.getName());
            map.put(METHODS_KEY, ANY_VALUE);
        } else {
            // Sorted, comma-joined method names give a deterministic value.
            map.put(METHODS_KEY, StringUtils.join(new TreeSet<>(Arrays.asList(methods)), COMMA_SEPARATOR));
        }
    }
    // Application, module, consumer and this reference are appended in turn; later
    // appends can overwrite earlier entries for the same key.
    AbstractConfig.appendParameters(map, getApplication());
    AbstractConfig.appendParameters(map, getModule());
    AbstractConfig.appendParameters(map, consumer);
    AbstractConfig.appendParameters(map, this);
    String hostToRegistry = ConfigUtils.getSystemProperty(DUBBO_IP_TO_REGISTRY);
    if (StringUtils.isEmpty(hostToRegistry)) {
        hostToRegistry = NetUtils.getLocalHost();
    } else if (isInvalidLocalHost(hostToRegistry)) {
        throw new IllegalArgumentException("Specified invalid registry ip from property:" + DUBBO_IP_TO_REGISTRY
                + ", value:" + hostToRegistry);
    }
    map.put(REGISTER_IP_KEY, hostToRegistry);
    if (CollectionUtils.isNotEmpty(getMethods())) {
        for (MethodConfig methodConfig : getMethods()) {
            AbstractConfig.appendParameters(map, methodConfig, methodConfig.getName());
            // Legacy "<method>.retry=false" is translated to "<method>.retries=0".
            String retryKey = methodConfig.getName() + ".retry";
            if (map.containsKey(retryKey)) {
                String retryValue = map.remove(retryKey);
                if ("false".equals(retryValue)) {
                    map.put(methodConfig.getName() + ".retries", "0");
                }
            }
        }
    }
    return map;
}
/**
 * Resolves the candidate URLs (direct url, mesh url, or registry aggregation), creates the
 * cluster invoker, publishes the consumer-side service definition and wraps the invoker
 * into the proxy handed back to user code.
 *
 * @param referenceParameters fully-assembled reference parameters (see appendConfig())
 * @return the generated proxy implementing the service interface
 */
@SuppressWarnings({"unchecked"})
private T createProxy(Map<String, String> referenceParameters) {
    urls.clear();
    meshModeHandleUrl(referenceParameters);
    if (StringUtils.isNotEmpty(url)) {
        // user specified URL, could be peer-to-peer address, or register center's address.
        parseUrl(referenceParameters);
    } else {
        // if protocols not in jvm checkRegistry
        aggregateUrlFromRegistry(referenceParameters);
    }
    createInvoker();
    if (logger.isInfoEnabled()) {
        logger.info("Referred dubbo service: [" + referenceParameters.get(INTERFACE_KEY) + "]."
                + (ProtocolUtils.isGeneric(referenceParameters.get(GENERIC_KEY))
                        ? " it's GenericService reference"
                        : " it's not GenericService reference"));
    }
    // Build a consumer:// url describing this reference and publish its service definition.
    URL consumerUrl = new ServiceConfigURL(
            CONSUMER_PROTOCOL,
            referenceParameters.get(REGISTER_IP_KEY),
            0,
            referenceParameters.get(INTERFACE_KEY),
            referenceParameters);
    consumerUrl = consumerUrl.setScopeModel(getScopeModel());
    consumerUrl = consumerUrl.setServiceModel(consumerModel);
    MetadataUtils.publishServiceDefinition(consumerUrl, consumerModel.getServiceModel(), getApplicationModel());
    // create service proxy
    return (T) proxyFactory.getProxy(invoker, ProtocolUtils.isGeneric(generic));
}
/**
 * if enable mesh mode, handle url.
 * Builds a direct triple:// url of the form
 * {@code <providedBy>.<namespace>.svc.<clusterDomain>:<port>} and assigns it to {@code url},
 * unless the user already specified a url (which then takes precedence).
 *
 * @param referenceParameters referenceParameters
 */
private void meshModeHandleUrl(Map<String, String> referenceParameters) {
    if (!checkMeshConfig(referenceParameters)) {
        return;
    }
    if (StringUtils.isNotEmpty(url)) {
        // user specified URL, could be peer-to-peer address, or register center's address.
        if (logger.isInfoEnabled()) {
            logger.info("The url already exists, mesh no longer processes url: " + url);
        }
        return;
    }
    // get provider namespace if (@DubboReference, <reference provider-namespace="xx"/>) present
    String podNamespace = referenceParameters.get(RegistryConstants.PROVIDER_NAMESPACE);
    // get pod namespace from env if annotation not present the provider namespace
    if (StringUtils.isEmpty(podNamespace)) {
        if (StringUtils.isEmpty(System.getenv(POD_NAMESPACE))) {
            if (logger.isWarnEnabled()) {
                logger.warn(
                        CONFIG_FAILED_LOAD_ENV_VARIABLE,
                        "",
                        "",
                        "Can not get env variable: POD_NAMESPACE, it may not be running in the K8S environment , "
                                + "finally use 'default' replace.");
            }
            podNamespace = "default";
        } else {
            podNamespace = System.getenv(POD_NAMESPACE);
        }
    }
    // In mesh mode, providedBy equals K8S Service name.
    String providedBy = referenceParameters.get(PROVIDED_BY);
    // cluster_domain default is 'cluster.local',generally unchanged.
    String clusterDomain =
            Optional.ofNullable(System.getenv(CLUSTER_DOMAIN)).orElse(DEFAULT_CLUSTER_DOMAIN);
    // By VirtualService and DestinationRule, envoy will generate a new route rule,such as
    // 'demo.default.svc.cluster.local:80',the default port is 80.
    Integer meshPort = Optional.ofNullable(getProviderPort()).orElse(DEFAULT_MESH_PORT);
    // DubboReference default is -1, process it.
    meshPort = meshPort > -1 ? meshPort : DEFAULT_MESH_PORT;
    // get mesh url.
    url = TRIPLE + "://" + providedBy + "." + podNamespace + SVC + clusterDomain + ":" + meshPort;
}
/**
 * Validates the mesh-related configuration before a mesh url is constructed.
 * When mesh mode is off, forces {@code unloadClusterRelated=false} and reports false.
 *
 * @param referenceParameters referenceParameters
 * @return whether mesh mode is enabled and correctly configured
 * @throws IllegalStateException if mesh mode is on but the triple protocol or providedBy is missing
 */
private boolean checkMeshConfig(Map<String, String> referenceParameters) {
    boolean meshEnabled = "true".equals(referenceParameters.getOrDefault(MESH_ENABLE, "false"));
    if (!meshEnabled) {
        // In mesh mode, unloadClusterRelated can only be false.
        referenceParameters.put(UNLOAD_CLUSTER_RELATED, "false");
        return false;
    }
    boolean hasTriple =
            getScopeModel().getConfigManager().getProtocol(TRIPLE).isPresent();
    if (!hasTriple) {
        throw new IllegalStateException("In mesh mode, a triple protocol must be specified");
    }
    if (StringUtils.isEmpty(referenceParameters.get(PROVIDED_BY))) {
        throw new IllegalStateException("In mesh mode, the providedBy of ReferenceConfig is must be set");
    }
    return true;
}
/**
 * Parses the user-specified {@code url} property (semicolon separated) into reference URLs.
 * Registry addresses carry the parameters as a refer attribute; direct peer addresses get
 * the parameters merged in and are flagged with the peer attribute.
 */
private void parseUrl(Map<String, String> referenceParameters) {
    String[] userUrls = SEMICOLON_SPLIT_PATTERN.split(url);
    if (!ArrayUtils.isNotEmpty(userUrls)) {
        return;
    }
    for (String raw : userUrls) {
        URL parsed = URL.valueOf(raw);
        if (StringUtils.isEmpty(parsed.getPath())) {
            // Default the path to the referenced interface name.
            parsed = parsed.setPath(interfaceName);
        }
        parsed = parsed.setScopeModel(getScopeModel());
        parsed = parsed.setServiceModel(consumerModel);
        if (UrlUtils.isRegistry(parsed)) {
            urls.add(parsed.putAttribute(REFER_KEY, referenceParameters));
        } else {
            URL merged = getScopeModel()
                    .getApplicationModel()
                    .getBeanFactory()
                    .getBean(ClusterUtils.class)
                    .mergeUrl(parsed, referenceParameters);
            urls.add(merged.putAttribute(PEER_KEY, true));
        }
    }
}
/**
 * Get URLs from the registry and aggregate them.
 * Falls back to an in-jvm (local) url when no registry url applies and a local reference
 * is appropriate; throws if no candidate url can be found at all.
 */
private void aggregateUrlFromRegistry(Map<String, String> referenceParameters) {
    checkRegistry();
    List<URL> us = ConfigValidationUtils.loadRegistries(this, false);
    if (CollectionUtils.isNotEmpty(us)) {
        for (URL u : us) {
            // Attach the monitor url (when configured) to each registry url.
            URL monitorUrl = ConfigValidationUtils.loadMonitor(this, u);
            if (monitorUrl != null) {
                u = u.putAttribute(MONITOR_KEY, monitorUrl);
            }
            u = u.setScopeModel(getScopeModel());
            u = u.setServiceModel(consumerModel);
            if (isInjvm() != null && isInjvm()) {
                u = u.addParameter(LOCAL_PROTOCOL, true);
            }
            urls.add(u.putAttribute(REFER_KEY, referenceParameters));
        }
    }
    if (urls.isEmpty() && shouldJvmRefer(referenceParameters)) {
        // No registry url; fall back to a local (in-jvm) reference.
        URL injvmUrl = new URL(LOCAL_PROTOCOL, LOCALHOST_VALUE, 0, interfaceClass.getName())
                .addParameters(referenceParameters);
        injvmUrl = injvmUrl.setScopeModel(getScopeModel());
        injvmUrl = injvmUrl.setServiceModel(consumerModel);
        urls.add(injvmUrl.putAttribute(REFER_KEY, referenceParameters));
    }
    if (urls.isEmpty()) {
        throw new IllegalStateException("No such any registry to reference " + interfaceName + " on the consumer "
                + NetUtils.getLocalHost() + " use dubbo version "
                + Version.getVersion()
                + ", please config <dubbo:registry address=\"...\" /> to your spring config.");
    }
}
/**
 * Create a reference invoker from the collected {@code urls}, wrapping it in a cluster
 * invoker when needed (single direct url with cluster enabled, multi-registry with
 * zone-aware cluster, or multi direct urls with the configured cluster).
 */
@SuppressWarnings({"unchecked", "rawtypes"})
private void createInvoker() {
    if (urls.size() == 1) {
        URL curUrl = urls.get(0);
        invoker = protocolSPI.refer(interfaceClass, curUrl);
        // registry url, mesh-enable and unloadClusterRelated is true, not need Cluster.
        if (!UrlUtils.isRegistry(curUrl) && !curUrl.getParameter(UNLOAD_CLUSTER_RELATED, false)) {
            List<Invoker<?>> invokers = new ArrayList<>();
            invokers.add(invoker);
            invoker = Cluster.getCluster(getScopeModel(), Cluster.DEFAULT)
                    .join(new StaticDirectory(curUrl, invokers), true);
        }
    } else {
        List<Invoker<?>> invokers = new ArrayList<>();
        URL registryUrl = null;
        for (URL url : urls) {
            // For multi-registry scenarios, it is not checked whether each referInvoker is available.
            // Because this invoker may become available later.
            invokers.add(protocolSPI.refer(interfaceClass, url));
            if (UrlUtils.isRegistry(url)) {
                // use last registry url
                registryUrl = url;
            }
        }
        if (registryUrl != null) {
            // registry url is available
            // for multi-subscription scenario, use 'zone-aware' policy by default
            String cluster = registryUrl.getParameter(CLUSTER_KEY, ZoneAwareCluster.NAME);
            // The invoker wrap sequence would be: ZoneAwareClusterInvoker(StaticDirectory) ->
            // FailoverClusterInvoker
            // (RegistryDirectory, routing happens here) -> Invoker
            invoker = Cluster.getCluster(registryUrl.getScopeModel(), cluster, false)
                    .join(new StaticDirectory(registryUrl, invokers), false);
        } else {
            // not a registry url, must be direct invoke.
            if (CollectionUtils.isEmpty(invokers)) {
                throw new IllegalArgumentException("invokers == null");
            }
            URL curUrl = invokers.get(0).getUrl();
            String cluster = curUrl.getParameter(CLUSTER_KEY, Cluster.DEFAULT);
            invoker =
                    Cluster.getCluster(getScopeModel(), cluster).join(new StaticDirectory(curUrl, invokers), true);
        }
    }
}
/**
 * Blocks until the created invoker reports itself available or the given timeout elapses,
 * polling every 100ms. No-op when availability checking is disabled via shouldCheck().
 *
 * @param timeout extra milliseconds to keep retrying after the first failed check; the
 *                retry loop always runs at least once, so even {@code timeout == 0}
 *                re-checks once after a short pause
 * @throws IllegalStateException if the invoker is still unavailable when the deadline
 *                               passes (message deliberately contains
 *                               "No provider available for the service", which
 *                               logAndCleanup() matches on)
 */
private void checkInvokerAvailable(long timeout) throws IllegalStateException {
    if (!shouldCheck()) {
        return;
    }
    boolean available = invoker.isAvailable();
    if (available) {
        // Fast path: available on the first check, nothing to log.
        return;
    }
    long startTime = System.currentTimeMillis();
    long checkDeadline = startTime + timeout;
    do {
        try {
            Thread.sleep(100);
        } catch (InterruptedException e) {
            // Restore the interrupt flag and stop waiting.
            Thread.currentThread().interrupt();
            break;
        }
        available = invoker.isAvailable();
    } while (!available && checkDeadline > System.currentTimeMillis());
    // Fixed typo in the log message: "reties" -> "retries".
    logger.warn(
            LoggerCodeConstants.REGISTRY_EMPTY_ADDRESS,
            "",
            "",
            "Check reference of [" + getUniqueServiceName() + "] failed very beginning. " + "After "
                    + (System.currentTimeMillis() - startTime) + "ms retries, finally "
                    + (available ? "succeed" : "failed")
                    + ".");
    if (!available) {
        // 2-2 - No provider available.
        IllegalStateException illegalStateException =
                new IllegalStateException("Failed to check the status of the service "
                        + interfaceName
                        + ". No provider available for the service "
                        + (group == null ? "" : group + "/")
                        + interfaceName + (version == null ? "" : ":" + version)
                        + " from the url "
                        + invoker.getUrl()
                        + " to the consumer "
                        + NetUtils.getLocalHost() + " use dubbo version " + Version.getVersion());
        logger.error(
                CLUSTER_NO_VALID_PROVIDER,
                "provider not started",
                "",
                "No provider available.",
                illegalStateException);
        throw illegalStateException;
    }
}
/**
 * This method should be called right after the creation of this class's instance, before any property in other config modules is used.
 * Check each config modules are created properly and override their properties if necessary.
 * Resolves the interface class (GenericService for generic references), validates
 * stub/local settings, the registry (when no direct url is given) and the whole config.
 */
protected void checkAndUpdateSubConfigs() {
    if (StringUtils.isEmpty(interfaceName)) {
        throw new IllegalStateException("<dubbo:reference interface=\"\" /> interface not allow null!");
    }
    // get consumer's global configuration
    completeCompoundConfigs();
    // init some null configuration.
    List<ConfigInitializer> configInitializers = this.getExtensionLoader(ConfigInitializer.class)
            .getActivateExtension(URL.valueOf("configInitializer://"), (String[]) null);
    configInitializers.forEach(e -> e.initReferConfig(this));
    if (getGeneric() == null && getConsumer() != null) {
        // Inherit the generic flag from the consumer config when unset locally.
        setGeneric(getConsumer().getGeneric());
    }
    if (ProtocolUtils.isGeneric(generic)) {
        if (interfaceClass != null && !interfaceClass.equals(GenericService.class)) {
            logger.warn(
                    CONFIG_PROPERTY_CONFLICT,
                    "",
                    "",
                    String.format(
                            "Found conflicting attributes for interface type: [interfaceClass=%s] and [generic=%s], "
                                    + "because the 'generic' attribute has higher priority than 'interfaceClass', so change 'interfaceClass' to '%s'. "
                                    + "Note: it will make this reference bean as a candidate bean of type '%s' instead of '%s' when resolving dependency in Spring.",
                            interfaceClass.getName(),
                            generic,
                            GenericService.class.getName(),
                            GenericService.class.getName(),
                            interfaceClass.getName()));
        }
        interfaceClass = GenericService.class;
    } else {
        try {
            // Load the interface class, preferring the explicitly configured class loader.
            if (getInterfaceClassLoader() != null
                    && (interfaceClass == null || interfaceClass.getClassLoader() != getInterfaceClassLoader())) {
                interfaceClass = Class.forName(interfaceName, true, getInterfaceClassLoader());
            } else if (interfaceClass == null) {
                interfaceClass = Class.forName(
                        interfaceName, true, Thread.currentThread().getContextClassLoader());
            }
        } catch (ClassNotFoundException e) {
            throw new IllegalStateException(e.getMessage(), e);
        }
    }
    checkStubAndLocal(interfaceClass);
    if (StringUtils.isEmpty(url)) {
        checkRegistry();
    }
    resolveFile();
    ConfigValidationUtils.validateReferenceConfig(this);
    postProcessConfig();
}
/** After a config refresh, re-validate and normalize this reference's sub-configs. */
@Override
protected void postProcessRefresh() {
    super.postProcessRefresh();
    checkAndUpdateSubConfigs();
}
/**
 * Completes compound configs from the consumer config, inheriting registry ids from the
 * consumer when none were set on this reference.
 */
protected void completeCompoundConfigs() {
    super.completeCompoundConfigs(consumer);
    boolean inheritRegistryIds = consumer != null && StringUtils.isEmpty(registryIds);
    if (inheritRegistryIds) {
        setRegistryIds(consumer.getRegistryIds());
    }
}
/**
 * Figure out should refer the service in the same JVM from configurations. The default behavior is true
 * 1. if injvm is specified, then use it
 * 2. then if a url is specified, then assume it's a remote call
 * 3. otherwise, check scope parameter
 * 4. if scope is not specified but the target service is provided in the same JVM, then prefer to make the local
 * call, which is the default behavior
 */
protected boolean shouldJvmRefer(Map<String, String> map) {
    // 1. An explicit injvm setting always wins.
    if (isInjvm() != null) {
        return isInjvm();
    }
    // 2. An explicit url means a remote reference.
    if (StringUtils.isNotEmpty(url)) {
        return false;
    }
    // 3/4. Otherwise let the injvm protocol decide from the scope parameters.
    URL probeUrl = new ServiceConfigURL("temp", "localhost", 0, map);
    return InjvmProtocol.getInjvmProtocol(getScopeModel()).isInjvmRefer(probeUrl);
}
/**
 * Runs every activated {@link ConfigPostProcessor} extension against this reference config.
 */
private void postProcessConfig() {
    List<ConfigPostProcessor> configPostProcessors = this.getExtensionLoader(ConfigPostProcessor.class)
            .getActivateExtension(URL.valueOf("configPostProcessor://"), (String[]) null);
    // The previous code added the very same list to the set twice ("merge common and old
    // config" had collapsed into duplicated addAll calls); a single copy-construction is
    // equivalent and removes the redundancy.
    HashSet<ConfigPostProcessor> allConfigPostProcessor = new HashSet<>(configPostProcessors);
    allConfigPostProcessor.forEach(component -> component.postProcessReferConfig(this));
}
/**
 * Return if ReferenceConfig has been initialized (i.e. init() completed successfully).
 * Note: Cannot use `isInitialized` as it may be treated as a Java Bean property
 *
 * @return initialized
 */
@Transient
public boolean configInitialized() {
    return initialized;
}
/**
 * just for test
 *
 * @return the underlying invoker backing the proxy; {@code null} before init() or after cleanup
 */
@Deprecated
@Transient
public Invoker<?> getInvoker() {
    return invoker;
}
/** Returns a runnable that tears this reference down; handed to the consumer model. */
@Transient
public Runnable getDestroyRunner() {
    return () -> this.destroy();
}
}
|
googleapis/google-cloud-java | 36,911 | java-datastream/proto-google-cloud-datastream-v1/src/main/java/com/google/cloud/datastream/v1/MysqlTable.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/datastream/v1/datastream_resources.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.datastream.v1;
/**
*
*
* <pre>
* MySQL table.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.MysqlTable}
*/
public final class MysqlTable extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.datastream.v1.MysqlTable)
MysqlTableOrBuilder {
private static final long serialVersionUID = 0L;

// Use MysqlTable.newBuilder() to construct.
private MysqlTable(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
}

// Default instance state: empty table name, no columns.
private MysqlTable() {
    table_ = "";
    mysqlColumns_ = java.util.Collections.emptyList();
}

// Called reflectively by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new MysqlTable();
}

// Descriptor for google.cloud.datastream.v1.MysqlTable.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datastream.v1.DatastreamResourcesProto
            .internal_static_google_cloud_datastream_v1_MysqlTable_descriptor;
}

// Maps descriptor fields to the generated accessors for reflective access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
    return com.google.cloud.datastream.v1.DatastreamResourcesProto
            .internal_static_google_cloud_datastream_v1_MysqlTable_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                    com.google.cloud.datastream.v1.MysqlTable.class,
                    com.google.cloud.datastream.v1.MysqlTable.Builder.class);
}
public static final int TABLE_FIELD_NUMBER = 1;

// Holds either a String or a ByteString; getTable()/getTableBytes() convert and cache lazily.
@SuppressWarnings("serial")
private volatile java.lang.Object table_ = "";
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table = 1;</code>
*
* @return The table.
*/
@java.lang.Override
public java.lang.String getTable() {
    java.lang.Object ref = table_;
    if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
    } else {
        // First String access: decode the ByteString and cache the result.
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        table_ = s;
        return s;
    }
}
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table = 1;</code>
*
* @return The bytes for table.
*/
@java.lang.Override
public com.google.protobuf.ByteString getTableBytes() {
    java.lang.Object ref = table_;
    if (ref instanceof java.lang.String) {
        // First bytes access: encode the String as UTF-8 and cache the result.
        com.google.protobuf.ByteString b =
                com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        table_ = b;
        return b;
    } else {
        return (com.google.protobuf.ByteString) ref;
    }
}
public static final int MYSQL_COLUMNS_FIELD_NUMBER = 2;

// Repeated field storage; the empty-list default is replaced during parsing/building.
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.datastream.v1.MysqlColumn> mysqlColumns_;
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
// Returns the internal (unmodifiable once built) column list.
@java.lang.Override
public java.util.List<com.google.cloud.datastream.v1.MysqlColumn> getMysqlColumnsList() {
    return mysqlColumns_;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
// Same backing list, exposed through the OrBuilder interface view.
@java.lang.Override
public java.util.List<? extends com.google.cloud.datastream.v1.MysqlColumnOrBuilder>
        getMysqlColumnsOrBuilderList() {
    return mysqlColumns_;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
// Number of mysql_columns entries.
@java.lang.Override
public int getMysqlColumnsCount() {
    return mysqlColumns_.size();
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
// Element access by index (throws IndexOutOfBoundsException like List.get).
@java.lang.Override
public com.google.cloud.datastream.v1.MysqlColumn getMysqlColumns(int index) {
    return mysqlColumns_.get(index);
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
// OrBuilder view of a single element by index.
@java.lang.Override
public com.google.cloud.datastream.v1.MysqlColumnOrBuilder getMysqlColumnsOrBuilder(int index) {
    return mysqlColumns_.get(index);
}
// Memoized result of isInitialized(): -1 = not computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

// This message has no required fields, so initialization always succeeds.
@java.lang.Override
public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
}
// Serializes set fields in field-number order: table (1), then each mysql_columns entry (2),
// then any unknown fields preserved from parsing.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(table_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 1, table_);
    }
    for (int i = 0; i < mysqlColumns_.size(); i++) {
        output.writeMessage(2, mysqlColumns_.get(i));
    }
    getUnknownFields().writeTo(output);
}
// Computes the wire size once and memoizes it in memoizedSize (-1 = not yet computed).
@java.lang.Override
public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(table_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, table_);
    }
    for (int i = 0; i < mysqlColumns_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, mysqlColumns_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
}
// Field-by-field equality, including unknown fields; non-MysqlTable objects defer to super.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
        return true;
    }
    if (!(obj instanceof com.google.cloud.datastream.v1.MysqlTable)) {
        return super.equals(obj);
    }
    com.google.cloud.datastream.v1.MysqlTable other =
            (com.google.cloud.datastream.v1.MysqlTable) obj;

    if (!getTable().equals(other.getTable())) return false;
    if (!getMysqlColumnsList().equals(other.getMysqlColumnsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
}
// Hash over descriptor, set fields and unknown fields; memoized after first computation
// (consistent with equals above).
@java.lang.Override
public int hashCode() {
    if (memoizedHashCode != 0) {
        return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + TABLE_FIELD_NUMBER;
    hash = (53 * hash) + getTable().hashCode();
    if (getMysqlColumnsCount() > 0) {
        hash = (37 * hash) + MYSQL_COLUMNS_FIELD_NUMBER;
        hash = (53 * hash) + getMysqlColumnsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
}
// Static parse factories: deserialize a MysqlTable from the given source. The
// ExtensionRegistry overloads make registered extensions visible during parsing; the
// "delimited" variants read a varint length prefix before the message bytes.
public static com.google.cloud.datastream.v1.MysqlTable parseFrom(java.nio.ByteBuffer data)
        throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.cloud.datastream.v1.MysqlTable parseFrom(
        java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.datastream.v1.MysqlTable parseFrom(
        com.google.protobuf.ByteString data)
        throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.cloud.datastream.v1.MysqlTable parseFrom(
        com.google.protobuf.ByteString data,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.datastream.v1.MysqlTable parseFrom(byte[] data)
        throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.cloud.datastream.v1.MysqlTable parseFrom(
        byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.datastream.v1.MysqlTable parseFrom(java.io.InputStream input)
        throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.datastream.v1.MysqlTable parseFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
            PARSER, input, extensionRegistry);
}

public static com.google.cloud.datastream.v1.MysqlTable parseDelimitedFrom(
        java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.datastream.v1.MysqlTable parseDelimitedFrom(
        java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
            PARSER, input, extensionRegistry);
}

public static com.google.cloud.datastream.v1.MysqlTable parseFrom(
        com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.datastream.v1.MysqlTable parseFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
            PARSER, input, extensionRegistry);
}
// Builder factories: newBuilder() starts empty, newBuilder(prototype) starts pre-populated,
// toBuilder() copies this message's current state.
@java.lang.Override
public Builder newBuilderForType() {
    return newBuilder();
}

public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.cloud.datastream.v1.MysqlTable prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
    // The default instance yields a fresh empty builder; anything else is copied in.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
}
/**
*
*
* <pre>
* MySQL table.
* </pre>
*
* Protobuf type {@code google.cloud.datastream.v1.MysqlTable}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.datastream.v1.MysqlTable)
com.google.cloud.datastream.v1.MysqlTableOrBuilder {
// Descriptor for google.cloud.datastream.v1.MysqlTable (same as the message's).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.datastream.v1.DatastreamResourcesProto
            .internal_static_google_cloud_datastream_v1_MysqlTable_descriptor;
}

// Maps descriptor fields to the generated accessors for reflective access.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
    return com.google.cloud.datastream.v1.DatastreamResourcesProto
            .internal_static_google_cloud_datastream_v1_MysqlTable_fieldAccessorTable
            .ensureFieldAccessorsInitialized(
                    com.google.cloud.datastream.v1.MysqlTable.class,
                    com.google.cloud.datastream.v1.MysqlTable.Builder.class);
}

// Construct using com.google.cloud.datastream.v1.MysqlTable.newBuilder()
private Builder() {}

// Constructor used when this builder is nested inside a parent builder.
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
}
// Resets every field to its default and clears all has-bits.
@java.lang.Override
public Builder clear() {
    super.clear();
    bitField0_ = 0;
    table_ = "";
    if (mysqlColumnsBuilder_ == null) {
        mysqlColumns_ = java.util.Collections.emptyList();
    } else {
        mysqlColumns_ = null;
        mysqlColumnsBuilder_.clear();
    }
    // Redundant after the reset above, but kept as emitted by the code generator.
    bitField0_ = (bitField0_ & ~0x00000002);
    return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.datastream.v1.DatastreamResourcesProto
            .internal_static_google_cloud_datastream_v1_MysqlTable_descriptor;
}

// Default (all-fields-default) instance of the message type this builder produces.
@java.lang.Override
public com.google.cloud.datastream.v1.MysqlTable getDefaultInstanceForType() {
    return com.google.cloud.datastream.v1.MysqlTable.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.datastream.v1.MysqlTable build() {
    com.google.cloud.datastream.v1.MysqlTable result = buildPartial();
    if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
    }
    return result;
}

// Builds without the initialization check (this message's isInitialized() is always true).
@java.lang.Override
public com.google.cloud.datastream.v1.MysqlTable buildPartial() {
    com.google.cloud.datastream.v1.MysqlTable result =
            new com.google.cloud.datastream.v1.MysqlTable(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) {
        buildPartial0(result);
    }
    onBuilt();
    return result;
}

// Transfers the repeated mysql_columns field, freezing the plain list to unmodifiable.
private void buildPartialRepeatedFields(com.google.cloud.datastream.v1.MysqlTable result) {
    if (mysqlColumnsBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
            mysqlColumns_ = java.util.Collections.unmodifiableList(mysqlColumns_);
            bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.mysqlColumns_ = mysqlColumns_;
    } else {
        result.mysqlColumns_ = mysqlColumnsBuilder_.build();
    }
}

// Transfers scalar fields guarded by their has-bits (bit 0x1 = table).
private void buildPartial0(com.google.cloud.datastream.v1.MysqlTable result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
        result.table_ = table_;
    }
}
// The overrides below simply delegate to GeneratedMessageV3.Builder; they exist so each
// call returns the concrete Builder type instead of the generic superclass builder.
@java.lang.Override
public Builder clone() {
    return super.clone();
}

@java.lang.Override
public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.datastream.v1.MysqlTable) {
return mergeFrom((com.google.cloud.datastream.v1.MysqlTable) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Typed merge: copies set fields from `other` into this builder.
// Singular `table` overwrites; repeated `mysql_columns` appends.
public Builder mergeFrom(com.google.cloud.datastream.v1.MysqlTable other) {
if (other == com.google.cloud.datastream.v1.MysqlTable.getDefaultInstance()) return this;
if (!other.getTable().isEmpty()) {
table_ = other.table_;
bitField0_ |= 0x00000001;
onChanged();
}
if (mysqlColumnsBuilder_ == null) {
// List-backed mode: adopt other's (immutable) list when ours is empty,
// otherwise copy-on-write via ensureMysqlColumnsIsMutable() and append.
if (!other.mysqlColumns_.isEmpty()) {
if (mysqlColumns_.isEmpty()) {
mysqlColumns_ = other.mysqlColumns_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureMysqlColumnsIsMutable();
mysqlColumns_.addAll(other.mysqlColumns_);
}
onChanged();
}
} else {
// Builder-backed mode: if our nested builder is empty, discard it and adopt
// other's list directly (re-creating the field builder only when the
// runtime forces field builders); otherwise append messages.
if (!other.mysqlColumns_.isEmpty()) {
if (mysqlColumnsBuilder_.isEmpty()) {
mysqlColumnsBuilder_.dispose();
mysqlColumnsBuilder_ = null;
mysqlColumns_ = other.mysqlColumns_;
bitField0_ = (bitField0_ & ~0x00000002);
mysqlColumnsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getMysqlColumnsFieldBuilder()
: null;
} else {
mysqlColumnsBuilder_.addAllMessages(other.mysqlColumns_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
// proto3 message with no required fields: always initialized.
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Wire-format merge: reads tag/value pairs from the stream until EOF (tag 0)
// or an end-group tag. Field 1 (tag 10) = table, field 2 (tag 18) =
// mysql_columns. Unknown fields are preserved via parseUnknownField.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
table_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
com.google.cloud.datastream.v1.MysqlColumn m =
input.readMessage(
com.google.cloud.datastream.v1.MysqlColumn.parser(), extensionRegistry);
if (mysqlColumnsBuilder_ == null) {
ensureMysqlColumnsIsMutable();
mysqlColumns_.add(m);
} else {
mysqlColumnsBuilder_.addMessage(m);
}
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parent builders even on failure: fields read so far were mutated.
onChanged();
} // finally
return this;
}
// Presence/mutability bits: 0x00000001 = table set, 0x00000002 = mysqlColumns list mutable.
private int bitField0_;
// Lazily-decoded string field: holds either a String or a ByteString and is
// swapped to the requested representation on first access (standard protobuf idiom).
private java.lang.Object table_ = "";
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table = 1;</code>
*
* @return The table.
*/
public java.lang.String getTable() {
java.lang.Object ref = table_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
table_ = s; // cache the decoded form for subsequent calls
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table = 1;</code>
*
* @return The bytes for table.
*/
public com.google.protobuf.ByteString getTableBytes() {
java.lang.Object ref = table_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
table_ = b; // cache the encoded form for subsequent calls
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table = 1;</code>
*
* @param value The table to set.
* @return This builder for chaining.
*/
public Builder setTable(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
table_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearTable() {
table_ = getDefaultInstance().getTable();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Table name.
* </pre>
*
* <code>string table = 1;</code>
*
* @param value The bytes for table to set.
* @return This builder for chaining.
*/
public Builder setTableBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
table_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// Repeated field storage. Starts as the shared immutable empty list; a private
// mutable copy is made on first write (copy-on-write, guarded by bit 0x00000002).
private java.util.List<com.google.cloud.datastream.v1.MysqlColumn> mysqlColumns_ =
java.util.Collections.emptyList();
private void ensureMysqlColumnsIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
mysqlColumns_ =
new java.util.ArrayList<com.google.cloud.datastream.v1.MysqlColumn>(mysqlColumns_);
bitField0_ |= 0x00000002;
}
}
// When non-null, this nested field builder owns the repeated field and
// mysqlColumns_ is ignored; all accessors below branch on this invariant.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datastream.v1.MysqlColumn,
com.google.cloud.datastream.v1.MysqlColumn.Builder,
com.google.cloud.datastream.v1.MysqlColumnOrBuilder>
mysqlColumnsBuilder_;
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public java.util.List<com.google.cloud.datastream.v1.MysqlColumn> getMysqlColumnsList() {
if (mysqlColumnsBuilder_ == null) {
return java.util.Collections.unmodifiableList(mysqlColumns_);
} else {
return mysqlColumnsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public int getMysqlColumnsCount() {
if (mysqlColumnsBuilder_ == null) {
return mysqlColumns_.size();
} else {
return mysqlColumnsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public com.google.cloud.datastream.v1.MysqlColumn getMysqlColumns(int index) {
if (mysqlColumnsBuilder_ == null) {
return mysqlColumns_.get(index);
} else {
return mysqlColumnsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder setMysqlColumns(int index, com.google.cloud.datastream.v1.MysqlColumn value) {
if (mysqlColumnsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMysqlColumnsIsMutable();
mysqlColumns_.set(index, value);
onChanged();
} else {
mysqlColumnsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder setMysqlColumns(
int index, com.google.cloud.datastream.v1.MysqlColumn.Builder builderForValue) {
if (mysqlColumnsBuilder_ == null) {
ensureMysqlColumnsIsMutable();
mysqlColumns_.set(index, builderForValue.build());
onChanged();
} else {
mysqlColumnsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder addMysqlColumns(com.google.cloud.datastream.v1.MysqlColumn value) {
if (mysqlColumnsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMysqlColumnsIsMutable();
mysqlColumns_.add(value);
onChanged();
} else {
mysqlColumnsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder addMysqlColumns(int index, com.google.cloud.datastream.v1.MysqlColumn value) {
if (mysqlColumnsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMysqlColumnsIsMutable();
mysqlColumns_.add(index, value);
onChanged();
} else {
mysqlColumnsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder addMysqlColumns(
com.google.cloud.datastream.v1.MysqlColumn.Builder builderForValue) {
if (mysqlColumnsBuilder_ == null) {
ensureMysqlColumnsIsMutable();
mysqlColumns_.add(builderForValue.build());
onChanged();
} else {
mysqlColumnsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder addMysqlColumns(
int index, com.google.cloud.datastream.v1.MysqlColumn.Builder builderForValue) {
if (mysqlColumnsBuilder_ == null) {
ensureMysqlColumnsIsMutable();
mysqlColumns_.add(index, builderForValue.build());
onChanged();
} else {
mysqlColumnsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder addAllMysqlColumns(
java.lang.Iterable<? extends com.google.cloud.datastream.v1.MysqlColumn> values) {
if (mysqlColumnsBuilder_ == null) {
ensureMysqlColumnsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, mysqlColumns_);
onChanged();
} else {
mysqlColumnsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder clearMysqlColumns() {
if (mysqlColumnsBuilder_ == null) {
mysqlColumns_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
mysqlColumnsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public Builder removeMysqlColumns(int index) {
if (mysqlColumnsBuilder_ == null) {
ensureMysqlColumnsIsMutable();
mysqlColumns_.remove(index);
onChanged();
} else {
mysqlColumnsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public com.google.cloud.datastream.v1.MysqlColumn.Builder getMysqlColumnsBuilder(int index) {
return getMysqlColumnsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public com.google.cloud.datastream.v1.MysqlColumnOrBuilder getMysqlColumnsOrBuilder(int index) {
if (mysqlColumnsBuilder_ == null) {
return mysqlColumns_.get(index);
} else {
return mysqlColumnsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public java.util.List<? extends com.google.cloud.datastream.v1.MysqlColumnOrBuilder>
getMysqlColumnsOrBuilderList() {
if (mysqlColumnsBuilder_ != null) {
return mysqlColumnsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(mysqlColumns_);
}
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public com.google.cloud.datastream.v1.MysqlColumn.Builder addMysqlColumnsBuilder() {
return getMysqlColumnsFieldBuilder()
.addBuilder(com.google.cloud.datastream.v1.MysqlColumn.getDefaultInstance());
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public com.google.cloud.datastream.v1.MysqlColumn.Builder addMysqlColumnsBuilder(int index) {
return getMysqlColumnsFieldBuilder()
.addBuilder(index, com.google.cloud.datastream.v1.MysqlColumn.getDefaultInstance());
}
/**
*
*
* <pre>
* MySQL columns in the database.
* When unspecified as part of include/exclude objects, includes/excludes
* everything.
* </pre>
*
* <code>repeated .google.cloud.datastream.v1.MysqlColumn mysql_columns = 2;</code>
*/
public java.util.List<com.google.cloud.datastream.v1.MysqlColumn.Builder>
getMysqlColumnsBuilderList() {
return getMysqlColumnsFieldBuilder().getBuilderList();
}
// Lazily creates the nested field builder on first use and switches the
// repeated field into builder-backed mode (mysqlColumns_ is handed over and
// nulled out; see the invariant documented on mysqlColumnsBuilder_).
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datastream.v1.MysqlColumn,
com.google.cloud.datastream.v1.MysqlColumn.Builder,
com.google.cloud.datastream.v1.MysqlColumnOrBuilder>
getMysqlColumnsFieldBuilder() {
if (mysqlColumnsBuilder_ == null) {
mysqlColumnsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.datastream.v1.MysqlColumn,
com.google.cloud.datastream.v1.MysqlColumn.Builder,
com.google.cloud.datastream.v1.MysqlColumnOrBuilder>(
mysqlColumns_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
mysqlColumns_ = null;
}
return mysqlColumnsBuilder_;
}
// Delegating overrides for unknown-field handling (covariant return types).
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.datastream.v1.MysqlTable)
}
// @@protoc_insertion_point(class_scope:google.cloud.datastream.v1.MysqlTable)
// Shared immutable default instance, created once at class load.
private static final com.google.cloud.datastream.v1.MysqlTable DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.datastream.v1.MysqlTable();
}
public static com.google.cloud.datastream.v1.MysqlTable getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser: parses via a fresh Builder, attaching the partially
// parsed message to any thrown InvalidProtocolBufferException.
private static final com.google.protobuf.Parser<MysqlTable> PARSER =
new com.google.protobuf.AbstractParser<MysqlTable>() {
@java.lang.Override
public MysqlTable parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures so callers see a protobuf-typed exception.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<MysqlTable> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<MysqlTable> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.datastream.v1.MysqlTable getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// NOTE(review): the two lines below were a dataset-concatenation artifact (a
// stray "|" separator and a repo-metadata row for the next file,
// googleapis/google-cloud-java : java-analyticshub/.../RetryPolicy.java) fused
// with that file's license opener. Preserved here as a comment; the license
// comment opener is restored on the next line.
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/analyticshub/v1/pubsub.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.analyticshub.v1;
/**
*
*
* <pre>
* A policy that specifies how Pub/Sub retries message delivery.
*
* Retry delay will be exponential based on provided minimum and maximum
* backoffs. https://en.wikipedia.org/wiki/Exponential_backoff.
*
* RetryPolicy will be triggered on NACKs or acknowledgement deadline exceeded
* events for a given message.
*
* Retry Policy is implemented on a best effort basis. At times, the delay
* between consecutive deliveries may not match the configuration. That is,
* delay can be more or less than configured backoff.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.analyticshub.v1.RetryPolicy}
*/
public final class RetryPolicy extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.analyticshub.v1.RetryPolicy)
RetryPolicyOrBuilder {
private static final long serialVersionUID = 0L;
// Use RetryPolicy.newBuilder() to construct.
private RetryPolicy(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private RetryPolicy() {}
// Used reflectively by the protobuf runtime to create instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new RetryPolicy();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_RetryPolicy_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_RetryPolicy_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.analyticshub.v1.RetryPolicy.class,
com.google.cloud.bigquery.analyticshub.v1.RetryPolicy.Builder.class);
}
// Presence bits: 0x00000001 = minimum_backoff set, 0x00000002 = maximum_backoff set.
private int bitField0_;
public static final int MINIMUM_BACKOFF_FIELD_NUMBER = 1;
private com.google.protobuf.Duration minimumBackoff_;
/**
*
*
* <pre>
* Optional. The minimum delay between consecutive deliveries of a given
* message. Value should be between 0 and 600 seconds. Defaults to 10 seconds.
* </pre>
*
* <code>.google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the minimumBackoff field is set.
*/
@java.lang.Override
public boolean hasMinimumBackoff() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Optional. The minimum delay between consecutive deliveries of a given
* message. Value should be between 0 and 600 seconds. Defaults to 10 seconds.
* </pre>
*
* <code>.google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The minimumBackoff.
*/
@java.lang.Override
public com.google.protobuf.Duration getMinimumBackoff() {
// Never returns null: falls back to Duration's default instance when unset.
return minimumBackoff_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: minimumBackoff_;
}
/**
*
*
* <pre>
* Optional. The minimum delay between consecutive deliveries of a given
* message. Value should be between 0 and 600 seconds. Defaults to 10 seconds.
* </pre>
*
* <code>.google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.protobuf.DurationOrBuilder getMinimumBackoffOrBuilder() {
return minimumBackoff_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: minimumBackoff_;
}
public static final int MAXIMUM_BACKOFF_FIELD_NUMBER = 2;
private com.google.protobuf.Duration maximumBackoff_;
/**
*
*
* <pre>
* Optional. The maximum delay between consecutive deliveries of a given
* message. Value should be between 0 and 600 seconds. Defaults to 600
* seconds.
* </pre>
*
* <code>.google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return Whether the maximumBackoff field is set.
*/
@java.lang.Override
public boolean hasMaximumBackoff() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Optional. The maximum delay between consecutive deliveries of a given
* message. Value should be between 0 and 600 seconds. Defaults to 600
* seconds.
* </pre>
*
* <code>.google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The maximumBackoff.
*/
@java.lang.Override
public com.google.protobuf.Duration getMaximumBackoff() {
// Never returns null: falls back to Duration's default instance when unset.
return maximumBackoff_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: maximumBackoff_;
}
/**
*
*
* <pre>
* Optional. The maximum delay between consecutive deliveries of a given
* message. Value should be between 0 and 600 seconds. Defaults to 600
* seconds.
* </pre>
*
* <code>.google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*/
@java.lang.Override
public com.google.protobuf.DurationOrBuilder getMaximumBackoffOrBuilder() {
return maximumBackoff_ == null
? com.google.protobuf.Duration.getDefaultInstance()
: maximumBackoff_;
}
// Memoized initialization state: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes only the fields whose presence bits are set, in field-number order.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getMinimumBackoff());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getMaximumBackoff());
}
getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize) the serialized byte size.
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getMinimumBackoff());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getMaximumBackoff());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
// Structural equality: field presence and values must match, plus unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.bigquery.analyticshub.v1.RetryPolicy)) {
return super.equals(obj);
}
com.google.cloud.bigquery.analyticshub.v1.RetryPolicy other =
(com.google.cloud.bigquery.analyticshub.v1.RetryPolicy) obj;
if (hasMinimumBackoff() != other.hasMinimumBackoff()) return false;
if (hasMinimumBackoff()) {
if (!getMinimumBackoff().equals(other.getMinimumBackoff())) return false;
}
if (hasMaximumBackoff() != other.hasMaximumBackoff()) return false;
if (hasMaximumBackoff()) {
if (!getMaximumBackoff().equals(other.getMaximumBackoff())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
// Hash code consistent with equals(); memoized (0 means "not yet computed").
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasMinimumBackoff()) {
hash = (37 * hash) + MINIMUM_BACKOFF_FIELD_NUMBER;
hash = (53 * hash) + getMinimumBackoff().hashCode();
}
if (hasMaximumBackoff()) {
hash = (37 * hash) + MAXIMUM_BACKOFF_FIELD_NUMBER;
hash = (53 * hash) + getMaximumBackoff().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points over the various input kinds
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an extension registry. All delegate to PARSER / the
// GeneratedMessageV3 stream helpers.
public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a leading varint length before the message payload.
public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Creates a builder pre-populated with the given prototype's fields.
public static Builder newBuilder(
com.google.cloud.bigquery.analyticshub.v1.RetryPolicy prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty builder; anything else is merged in.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* A policy that specifies how Pub/Sub retries message delivery.
*
* Retry delay will be exponential based on provided minimum and maximum
* backoffs. https://en.wikipedia.org/wiki/Exponential_backoff.
*
* RetryPolicy will be triggered on NACKs or acknowledgement deadline exceeded
* events for a given message.
*
* Retry Policy is implemented on a best effort basis. At times, the delay
* between consecutive deliveries may not match the configuration. That is,
* delay can be more or less than configured backoff.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.analyticshub.v1.RetryPolicy}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.analyticshub.v1.RetryPolicy)
com.google.cloud.bigquery.analyticshub.v1.RetryPolicyOrBuilder {
// Builder descriptor plumbing (mirrors the message-level methods).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_RetryPolicy_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_RetryPolicy_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.analyticshub.v1.RetryPolicy.class,
com.google.cloud.bigquery.analyticshub.v1.RetryPolicy.Builder.class);
}
// Construct using com.google.cloud.bigquery.analyticshub.v1.RetryPolicy.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
// Eagerly creates the nested Duration field builders when the runtime
// requires field builders (alwaysUseFieldBuilders).
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getMinimumBackoffFieldBuilder();
getMaximumBackoffFieldBuilder();
}
}
// Resets all fields, disposing any nested Duration field builders.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
minimumBackoff_ = null;
if (minimumBackoffBuilder_ != null) {
minimumBackoffBuilder_.dispose();
minimumBackoffBuilder_ = null;
}
maximumBackoff_ = null;
if (maximumBackoffBuilder_ != null) {
maximumBackoffBuilder_.dispose();
maximumBackoffBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.analyticshub.v1.PubsubProto
.internal_static_google_cloud_bigquery_analyticshub_v1_RetryPolicy_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.RetryPolicy getDefaultInstanceForType() {
return com.google.cloud.bigquery.analyticshub.v1.RetryPolicy.getDefaultInstance();
}
// Builds the message, throwing if uninitialized (never, for this proto3 message).
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.RetryPolicy build() {
com.google.cloud.bigquery.analyticshub.v1.RetryPolicy result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.RetryPolicy buildPartial() {
com.google.cloud.bigquery.analyticshub.v1.RetryPolicy result =
new com.google.cloud.bigquery.analyticshub.v1.RetryPolicy(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies set fields (builder bit -> message bit): 0x1 minimum_backoff,
// 0x2 maximum_backoff; prefers the nested field builder's value when present.
private void buildPartial0(com.google.cloud.bigquery.analyticshub.v1.RetryPolicy result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.minimumBackoff_ =
minimumBackoffBuilder_ == null ? minimumBackoff_ : minimumBackoffBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.maximumBackoff_ =
maximumBackoffBuilder_ == null ? maximumBackoff_ : maximumBackoffBuilder_.build();
to_bitField0_ |= 0x00000002;
}
result.bitField0_ |= to_bitField0_;
}
// The overrides below simply delegate to GeneratedMessageV3.Builder; they are emitted
// by the generator to pin the covariant Builder return type.
@java.lang.Override
public Builder clone() {
  return super.clone();
}

@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.setField(field, value);
}

@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}

@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}

@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}

@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
// Generic merge entry point: dispatches to the typed overload when the other message
// is a RetryPolicy, otherwise falls back to the reflective merge in the superclass.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.cloud.bigquery.analyticshub.v1.RetryPolicy) {
    return mergeFrom((com.google.cloud.bigquery.analyticshub.v1.RetryPolicy) other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}

// Field-wise merge: only fields that are set on `other` are merged in (message fields
// are recursively merged, per proto3 merge semantics). Merging the shared default
// instance is a no-op and skips onChanged().
public Builder mergeFrom(com.google.cloud.bigquery.analyticshub.v1.RetryPolicy other) {
  if (other == com.google.cloud.bigquery.analyticshub.v1.RetryPolicy.getDefaultInstance())
    return this;
  if (other.hasMinimumBackoff()) {
    mergeMinimumBackoff(other.getMinimumBackoff());
  }
  if (other.hasMaximumBackoff()) {
    mergeMaximumBackoff(other.getMaximumBackoff());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}

// RetryPolicy has no required fields, so any instance is initialized.
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
// Streaming parse-merge: reads tags off the wire until EOF (tag 0) or an end-group tag.
// Field numbers 1 (minimum_backoff, wire tag 10) and 2 (maximum_backoff, wire tag 18)
// are parsed into their field builders; anything else is preserved as an unknown field.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            input.readMessage(getMinimumBackoffFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            input.readMessage(getMaximumBackoffFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parent builders even on failure, since some fields may have been mutated.
    onChanged();
  } // finally
  return this;
}
// Has-bits: bit 0x1 = minimum_backoff set, bit 0x2 = maximum_backoff set.
private int bitField0_;

// Cached message for field 1; null while the field is unset or while
// minimumBackoffBuilder_ owns the value (exactly one of the two is authoritative).
private com.google.protobuf.Duration minimumBackoff_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Duration,
        com.google.protobuf.Duration.Builder,
        com.google.protobuf.DurationOrBuilder>
    minimumBackoffBuilder_;
/**
 *
 *
 * <pre>
 * Optional. The minimum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 10 seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return Whether the minimumBackoff field is set.
 */
public boolean hasMinimumBackoff() {
  return ((bitField0_ & 0x00000001) != 0);
}
/**
 *
 *
 * <pre>
 * Optional. The minimum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 10 seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The minimumBackoff.
 */
public com.google.protobuf.Duration getMinimumBackoff() {
  if (minimumBackoffBuilder_ == null) {
    // Never returns null: unset reads as the shared Duration default instance.
    return minimumBackoff_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : minimumBackoff_;
  } else {
    return minimumBackoffBuilder_.getMessage();
  }
}
/**
 *
 *
 * <pre>
 * Optional. The minimum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 10 seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder setMinimumBackoff(com.google.protobuf.Duration value) {
  if (minimumBackoffBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    minimumBackoff_ = value;
  } else {
    minimumBackoffBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The minimum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 10 seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder setMinimumBackoff(com.google.protobuf.Duration.Builder builderForValue) {
  if (minimumBackoffBuilder_ == null) {
    minimumBackoff_ = builderForValue.build();
  } else {
    minimumBackoffBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000001;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The minimum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 10 seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder mergeMinimumBackoff(com.google.protobuf.Duration value) {
  if (minimumBackoffBuilder_ == null) {
    // Reference comparison against the shared default instance: merge into the
    // existing value only when a real (non-default) message is already present;
    // otherwise just adopt `value`.
    if (((bitField0_ & 0x00000001) != 0)
        && minimumBackoff_ != null
        && minimumBackoff_ != com.google.protobuf.Duration.getDefaultInstance()) {
      getMinimumBackoffBuilder().mergeFrom(value);
    } else {
      minimumBackoff_ = value;
    }
  } else {
    minimumBackoffBuilder_.mergeFrom(value);
  }
  if (minimumBackoff_ != null) {
    bitField0_ |= 0x00000001;
    onChanged();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The minimum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 10 seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder clearMinimumBackoff() {
  bitField0_ = (bitField0_ & ~0x00000001);
  minimumBackoff_ = null;
  if (minimumBackoffBuilder_ != null) {
    minimumBackoffBuilder_.dispose();
    minimumBackoffBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The minimum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 10 seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public com.google.protobuf.Duration.Builder getMinimumBackoffBuilder() {
  // Handing out a mutable builder implicitly marks the field as set.
  bitField0_ |= 0x00000001;
  onChanged();
  return getMinimumBackoffFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Optional. The minimum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 10 seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public com.google.protobuf.DurationOrBuilder getMinimumBackoffOrBuilder() {
  if (minimumBackoffBuilder_ != null) {
    return minimumBackoffBuilder_.getMessageOrBuilder();
  } else {
    return minimumBackoff_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : minimumBackoff_;
  }
}
/**
 *
 *
 * <pre>
 * Optional. The minimum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 10 seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration minimum_backoff = 1 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Duration,
        com.google.protobuf.Duration.Builder,
        com.google.protobuf.DurationOrBuilder>
    getMinimumBackoffFieldBuilder() {
  if (minimumBackoffBuilder_ == null) {
    // Lazily create the field builder, seeding it with the current value; ownership
    // of the field transfers to the builder, so the cached message is nulled out.
    minimumBackoffBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>(
            getMinimumBackoff(), getParentForChildren(), isClean());
    minimumBackoff_ = null;
  }
  return minimumBackoffBuilder_;
}
// Cached message for field 2; same single-owner protocol as minimumBackoff_ above:
// either this cache or maximumBackoffBuilder_ holds the value, never both.
private com.google.protobuf.Duration maximumBackoff_;
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Duration,
        com.google.protobuf.Duration.Builder,
        com.google.protobuf.DurationOrBuilder>
    maximumBackoffBuilder_;
/**
 *
 *
 * <pre>
 * Optional. The maximum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 600
 * seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return Whether the maximumBackoff field is set.
 */
public boolean hasMaximumBackoff() {
  return ((bitField0_ & 0x00000002) != 0);
}
/**
 *
 *
 * <pre>
 * Optional. The maximum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 600
 * seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return The maximumBackoff.
 */
public com.google.protobuf.Duration getMaximumBackoff() {
  if (maximumBackoffBuilder_ == null) {
    // Never returns null: unset reads as the shared Duration default instance.
    return maximumBackoff_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : maximumBackoff_;
  } else {
    return maximumBackoffBuilder_.getMessage();
  }
}
/**
 *
 *
 * <pre>
 * Optional. The maximum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 600
 * seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder setMaximumBackoff(com.google.protobuf.Duration value) {
  if (maximumBackoffBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    maximumBackoff_ = value;
  } else {
    maximumBackoffBuilder_.setMessage(value);
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The maximum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 600
 * seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder setMaximumBackoff(com.google.protobuf.Duration.Builder builderForValue) {
  if (maximumBackoffBuilder_ == null) {
    maximumBackoff_ = builderForValue.build();
  } else {
    maximumBackoffBuilder_.setMessage(builderForValue.build());
  }
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The maximum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 600
 * seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder mergeMaximumBackoff(com.google.protobuf.Duration value) {
  if (maximumBackoffBuilder_ == null) {
    // Reference comparison against the shared default instance: merge into an
    // existing non-default value, otherwise adopt `value` outright.
    if (((bitField0_ & 0x00000002) != 0)
        && maximumBackoff_ != null
        && maximumBackoff_ != com.google.protobuf.Duration.getDefaultInstance()) {
      getMaximumBackoffBuilder().mergeFrom(value);
    } else {
      maximumBackoff_ = value;
    }
  } else {
    maximumBackoffBuilder_.mergeFrom(value);
  }
  if (maximumBackoff_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The maximum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 600
 * seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public Builder clearMaximumBackoff() {
  bitField0_ = (bitField0_ & ~0x00000002);
  maximumBackoff_ = null;
  if (maximumBackoffBuilder_ != null) {
    maximumBackoffBuilder_.dispose();
    maximumBackoffBuilder_ = null;
  }
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * Optional. The maximum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 600
 * seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public com.google.protobuf.Duration.Builder getMaximumBackoffBuilder() {
  // Handing out a mutable builder implicitly marks the field as set.
  bitField0_ |= 0x00000002;
  onChanged();
  return getMaximumBackoffFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Optional. The maximum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 600
 * seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
public com.google.protobuf.DurationOrBuilder getMaximumBackoffOrBuilder() {
  if (maximumBackoffBuilder_ != null) {
    return maximumBackoffBuilder_.getMessageOrBuilder();
  } else {
    return maximumBackoff_ == null
        ? com.google.protobuf.Duration.getDefaultInstance()
        : maximumBackoff_;
  }
}
/**
 *
 *
 * <pre>
 * Optional. The maximum delay between consecutive deliveries of a given
 * message. Value should be between 0 and 600 seconds. Defaults to 600
 * seconds.
 * </pre>
 *
 * <code>
 * .google.protobuf.Duration maximum_backoff = 2 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
        com.google.protobuf.Duration,
        com.google.protobuf.Duration.Builder,
        com.google.protobuf.DurationOrBuilder>
    getMaximumBackoffFieldBuilder() {
  if (maximumBackoffBuilder_ == null) {
    // Lazily create the field builder seeded with the current value; ownership
    // transfers to the builder, so the cached message is nulled out.
    maximumBackoffBuilder_ =
        new com.google.protobuf.SingleFieldBuilderV3<
            com.google.protobuf.Duration,
            com.google.protobuf.Duration.Builder,
            com.google.protobuf.DurationOrBuilder>(
            getMaximumBackoff(), getParentForChildren(), isClean());
    maximumBackoff_ = null;
  }
  return maximumBackoffBuilder_;
}
// Delegating overrides re-declared only to pin the covariant Builder return type.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}

@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.analyticshub.v1.RetryPolicy)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.analyticshub.v1.RetryPolicy)
// Shared singleton representing an all-defaults RetryPolicy; identity comparisons
// against it (see mergeFrom) rely on there being exactly one such instance.
private static final com.google.cloud.bigquery.analyticshub.v1.RetryPolicy DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.bigquery.analyticshub.v1.RetryPolicy();
}

public static com.google.cloud.bigquery.analyticshub.v1.RetryPolicy getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser. On any parse failure the partially-built message is attached
// to the thrown InvalidProtocolBufferException so callers can inspect what was read.
private static final com.google.protobuf.Parser<RetryPolicy> PARSER =
    new com.google.protobuf.AbstractParser<RetryPolicy>() {
      @java.lang.Override
      public RetryPolicy parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
// Static and instance accessors for the shared parser singleton above.
public static com.google.protobuf.Parser<RetryPolicy> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<RetryPolicy> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.bigquery.analyticshub.v1.RetryPolicy getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ==== File boundary (concatenation artifact) ====
// Next file: googleapis/google-cloud-java (37,171 bytes)
// java-cloudsecuritycompliance/google-cloud-cloudsecuritycompliance/src/main/java/com/google/cloud/cloudsecuritycompliance/v1/stub/ConfigStubSettings.java
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.cloudsecuritycompliance.v1.stub;
import static com.google.cloud.cloudsecuritycompliance.v1.ConfigClient.ListCloudControlsPagedResponse;
import static com.google.cloud.cloudsecuritycompliance.v1.ConfigClient.ListFrameworksPagedResponse;
import static com.google.cloud.cloudsecuritycompliance.v1.ConfigClient.ListLocationsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.cloudsecuritycompliance.v1.CloudControl;
import com.google.cloud.cloudsecuritycompliance.v1.CreateCloudControlRequest;
import com.google.cloud.cloudsecuritycompliance.v1.CreateFrameworkRequest;
import com.google.cloud.cloudsecuritycompliance.v1.DeleteCloudControlRequest;
import com.google.cloud.cloudsecuritycompliance.v1.DeleteFrameworkRequest;
import com.google.cloud.cloudsecuritycompliance.v1.Framework;
import com.google.cloud.cloudsecuritycompliance.v1.GetCloudControlRequest;
import com.google.cloud.cloudsecuritycompliance.v1.GetFrameworkRequest;
import com.google.cloud.cloudsecuritycompliance.v1.ListCloudControlsRequest;
import com.google.cloud.cloudsecuritycompliance.v1.ListCloudControlsResponse;
import com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksRequest;
import com.google.cloud.cloudsecuritycompliance.v1.ListFrameworksResponse;
import com.google.cloud.cloudsecuritycompliance.v1.UpdateCloudControlRequest;
import com.google.cloud.cloudsecuritycompliance.v1.UpdateFrameworkRequest;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link ConfigStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (cloudsecuritycompliance.googleapis.com) and default port (443)
* are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getFramework:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* ConfigStubSettings.Builder configSettingsBuilder = ConfigStubSettings.newBuilder();
* configSettingsBuilder
* .getFrameworkSettings()
* .setRetrySettings(
* configSettingsBuilder
* .getFrameworkSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* ConfigStubSettings configSettings = configSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*/
@Generated("by gapic-generator-java")
public class ConfigStubSettings extends StubSettings<ConfigStubSettings> {
/** The default scopes of the service. */
private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
    ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();

// Per-RPC call settings. Paged RPCs (list*) use PagedCallSettings so callers can
// iterate across pages transparently; everything else is a plain unary call.
private final PagedCallSettings<
        ListFrameworksRequest, ListFrameworksResponse, ListFrameworksPagedResponse>
    listFrameworksSettings;
private final UnaryCallSettings<GetFrameworkRequest, Framework> getFrameworkSettings;
private final UnaryCallSettings<CreateFrameworkRequest, Framework> createFrameworkSettings;
private final UnaryCallSettings<UpdateFrameworkRequest, Framework> updateFrameworkSettings;
private final UnaryCallSettings<DeleteFrameworkRequest, Empty> deleteFrameworkSettings;
private final PagedCallSettings<
        ListCloudControlsRequest, ListCloudControlsResponse, ListCloudControlsPagedResponse>
    listCloudControlsSettings;
private final UnaryCallSettings<GetCloudControlRequest, CloudControl> getCloudControlSettings;
private final UnaryCallSettings<CreateCloudControlRequest, CloudControl>
    createCloudControlSettings;
private final UnaryCallSettings<UpdateCloudControlRequest, CloudControl>
    updateCloudControlSettings;
private final UnaryCallSettings<DeleteCloudControlRequest, Empty> deleteCloudControlSettings;
private final PagedCallSettings<
        ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
    listLocationsSettings;
private final UnaryCallSettings<GetLocationRequest, Location> getLocationSettings;
// Describes how the gax pagination machinery drives ListFrameworks: how to inject a
// page token / page size into a request and how to pull the next token and the
// resource list out of a response.
private static final PagedListDescriptor<ListFrameworksRequest, ListFrameworksResponse, Framework>
    LIST_FRAMEWORKS_PAGE_STR_DESC =
        new PagedListDescriptor<ListFrameworksRequest, ListFrameworksResponse, Framework>() {
          @Override
          public String emptyToken() {
            return "";
          }

          @Override
          public ListFrameworksRequest injectToken(ListFrameworksRequest payload, String token) {
            return ListFrameworksRequest.newBuilder(payload).setPageToken(token).build();
          }

          @Override
          public ListFrameworksRequest injectPageSize(
              ListFrameworksRequest payload, int pageSize) {
            return ListFrameworksRequest.newBuilder(payload).setPageSize(pageSize).build();
          }

          @Override
          public Integer extractPageSize(ListFrameworksRequest payload) {
            return payload.getPageSize();
          }

          @Override
          public String extractNextToken(ListFrameworksResponse payload) {
            return payload.getNextPageToken();
          }

          @Override
          public Iterable<Framework> extractResources(ListFrameworksResponse payload) {
            return payload.getFrameworksList();
          }
        };
// Pagination descriptor for ListCloudControls; same token/page-size plumbing as the
// frameworks descriptor above, over CloudControl resources.
private static final PagedListDescriptor<
        ListCloudControlsRequest, ListCloudControlsResponse, CloudControl>
    LIST_CLOUD_CONTROLS_PAGE_STR_DESC =
        new PagedListDescriptor<
            ListCloudControlsRequest, ListCloudControlsResponse, CloudControl>() {
          @Override
          public String emptyToken() {
            return "";
          }

          @Override
          public ListCloudControlsRequest injectToken(
              ListCloudControlsRequest payload, String token) {
            return ListCloudControlsRequest.newBuilder(payload).setPageToken(token).build();
          }

          @Override
          public ListCloudControlsRequest injectPageSize(
              ListCloudControlsRequest payload, int pageSize) {
            return ListCloudControlsRequest.newBuilder(payload).setPageSize(pageSize).build();
          }

          @Override
          public Integer extractPageSize(ListCloudControlsRequest payload) {
            return payload.getPageSize();
          }

          @Override
          public String extractNextToken(ListCloudControlsResponse payload) {
            return payload.getNextPageToken();
          }

          @Override
          public Iterable<CloudControl> extractResources(ListCloudControlsResponse payload) {
            return payload.getCloudControlsList();
          }
        };
// Pagination descriptor for the mixed-in Locations API (ListLocations).
private static final PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>
    LIST_LOCATIONS_PAGE_STR_DESC =
        new PagedListDescriptor<ListLocationsRequest, ListLocationsResponse, Location>() {
          @Override
          public String emptyToken() {
            return "";
          }

          @Override
          public ListLocationsRequest injectToken(ListLocationsRequest payload, String token) {
            return ListLocationsRequest.newBuilder(payload).setPageToken(token).build();
          }

          @Override
          public ListLocationsRequest injectPageSize(ListLocationsRequest payload, int pageSize) {
            return ListLocationsRequest.newBuilder(payload).setPageSize(pageSize).build();
          }

          @Override
          public Integer extractPageSize(ListLocationsRequest payload) {
            return payload.getPageSize();
          }

          @Override
          public String extractNextToken(ListLocationsResponse payload) {
            return payload.getNextPageToken();
          }

          @Override
          public Iterable<Location> extractResources(ListLocationsResponse payload) {
            return payload.getLocationsList();
          }
        };
// Wraps a raw ListFrameworks future into a lazily page-fetching paged response,
// using the descriptor above to walk page tokens.
private static final PagedListResponseFactory<
        ListFrameworksRequest, ListFrameworksResponse, ListFrameworksPagedResponse>
    LIST_FRAMEWORKS_PAGE_STR_FACT =
        new PagedListResponseFactory<
            ListFrameworksRequest, ListFrameworksResponse, ListFrameworksPagedResponse>() {
          @Override
          public ApiFuture<ListFrameworksPagedResponse> getFuturePagedResponse(
              UnaryCallable<ListFrameworksRequest, ListFrameworksResponse> callable,
              ListFrameworksRequest request,
              ApiCallContext context,
              ApiFuture<ListFrameworksResponse> futureResponse) {
            PageContext<ListFrameworksRequest, ListFrameworksResponse, Framework> pageContext =
                PageContext.create(callable, LIST_FRAMEWORKS_PAGE_STR_DESC, request, context);
            return ListFrameworksPagedResponse.createAsync(pageContext, futureResponse);
          }
        };
// Paged-response factory for ListCloudControls (see the frameworks factory above).
private static final PagedListResponseFactory<
        ListCloudControlsRequest, ListCloudControlsResponse, ListCloudControlsPagedResponse>
    LIST_CLOUD_CONTROLS_PAGE_STR_FACT =
        new PagedListResponseFactory<
            ListCloudControlsRequest,
            ListCloudControlsResponse,
            ListCloudControlsPagedResponse>() {
          @Override
          public ApiFuture<ListCloudControlsPagedResponse> getFuturePagedResponse(
              UnaryCallable<ListCloudControlsRequest, ListCloudControlsResponse> callable,
              ListCloudControlsRequest request,
              ApiCallContext context,
              ApiFuture<ListCloudControlsResponse> futureResponse) {
            PageContext<ListCloudControlsRequest, ListCloudControlsResponse, CloudControl>
                pageContext =
                    PageContext.create(
                        callable, LIST_CLOUD_CONTROLS_PAGE_STR_DESC, request, context);
            return ListCloudControlsPagedResponse.createAsync(pageContext, futureResponse);
          }
        };
// Paged-response factory for the mixed-in Locations API.
private static final PagedListResponseFactory<
        ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
    LIST_LOCATIONS_PAGE_STR_FACT =
        new PagedListResponseFactory<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>() {
          @Override
          public ApiFuture<ListLocationsPagedResponse> getFuturePagedResponse(
              UnaryCallable<ListLocationsRequest, ListLocationsResponse> callable,
              ListLocationsRequest request,
              ApiCallContext context,
              ApiFuture<ListLocationsResponse> futureResponse) {
            PageContext<ListLocationsRequest, ListLocationsResponse, Location> pageContext =
                PageContext.create(callable, LIST_LOCATIONS_PAGE_STR_DESC, request, context);
            return ListLocationsPagedResponse.createAsync(pageContext, futureResponse);
          }
        };
// Read-only accessors exposing the per-RPC settings configured on this instance.

/** Returns the object with the settings used for calls to listFrameworks. */
public PagedCallSettings<
        ListFrameworksRequest, ListFrameworksResponse, ListFrameworksPagedResponse>
    listFrameworksSettings() {
  return listFrameworksSettings;
}

/** Returns the object with the settings used for calls to getFramework. */
public UnaryCallSettings<GetFrameworkRequest, Framework> getFrameworkSettings() {
  return getFrameworkSettings;
}

/** Returns the object with the settings used for calls to createFramework. */
public UnaryCallSettings<CreateFrameworkRequest, Framework> createFrameworkSettings() {
  return createFrameworkSettings;
}

/** Returns the object with the settings used for calls to updateFramework. */
public UnaryCallSettings<UpdateFrameworkRequest, Framework> updateFrameworkSettings() {
  return updateFrameworkSettings;
}

/** Returns the object with the settings used for calls to deleteFramework. */
public UnaryCallSettings<DeleteFrameworkRequest, Empty> deleteFrameworkSettings() {
  return deleteFrameworkSettings;
}

/** Returns the object with the settings used for calls to listCloudControls. */
public PagedCallSettings<
        ListCloudControlsRequest, ListCloudControlsResponse, ListCloudControlsPagedResponse>
    listCloudControlsSettings() {
  return listCloudControlsSettings;
}

/** Returns the object with the settings used for calls to getCloudControl. */
public UnaryCallSettings<GetCloudControlRequest, CloudControl> getCloudControlSettings() {
  return getCloudControlSettings;
}

/** Returns the object with the settings used for calls to createCloudControl. */
public UnaryCallSettings<CreateCloudControlRequest, CloudControl> createCloudControlSettings() {
  return createCloudControlSettings;
}

/** Returns the object with the settings used for calls to updateCloudControl. */
public UnaryCallSettings<UpdateCloudControlRequest, CloudControl> updateCloudControlSettings() {
  return updateCloudControlSettings;
}

/** Returns the object with the settings used for calls to deleteCloudControl. */
public UnaryCallSettings<DeleteCloudControlRequest, Empty> deleteCloudControlSettings() {
  return deleteCloudControlSettings;
}

/** Returns the object with the settings used for calls to listLocations. */
public PagedCallSettings<ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
    listLocationsSettings() {
  return listLocationsSettings;
}

/** Returns the object with the settings used for calls to getLocation. */
public UnaryCallSettings<GetLocationRequest, Location> getLocationSettings() {
  return getLocationSettings;
}
public ConfigStub createStub() throws IOException {
if (getTransportChannelProvider()
.getTransportName()
.equals(GrpcTransportChannel.getGrpcTransportName())) {
return GrpcConfigStub.create(this);
}
if (getTransportChannelProvider()
.getTransportName()
.equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
return HttpJsonConfigStub.create(this);
}
throw new UnsupportedOperationException(
String.format(
"Transport not supported: %s", getTransportChannelProvider().getTransportName()));
}
  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "cloudsecuritycompliance";
  }
  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }
  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "cloudsecuritycompliance.googleapis.com:443";
  }
  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "cloudsecuritycompliance.mtls.googleapis.com:443";
  }
  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }
  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }
  /** Returns a builder for the default gRPC ChannelProvider for this service. */
  public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
    return InstantiatingGrpcChannelProvider.newBuilder()
        .setMaxInboundMessageSize(Integer.MAX_VALUE);
  }
  /** Returns a builder for the default REST ChannelProvider for this service. */
  @BetaApi
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }
  /** Returns the default transport channel provider; gRPC is the default transport. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultGrpcTransportProviderBuilder().build();
  }
  /** Returns a builder for the API client header provider used with the gRPC transport. */
  public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(ConfigStubSettings.class))
        .setTransportToken(
            GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
  }
  /** Returns a builder for the API client header provider used with the HTTP/JSON transport. */
  public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken("gapic", GaxProperties.getLibraryVersion(ConfigStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }
  /** Returns the default API client header provider builder; delegates to the gRPC variant. */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ConfigStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
  }
  /** Returns a new gRPC builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }
  /** Returns a new REST builder for this class. */
  public static Builder newHttpJsonBuilder() {
    return Builder.createHttpJsonDefault();
  }
  /** Returns a new builder for this class, configured from the given client context. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }
  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }
  /** Builds the settings object, freezing each per-method settings builder into its immutable form. */
  protected ConfigStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    listFrameworksSettings = settingsBuilder.listFrameworksSettings().build();
    getFrameworkSettings = settingsBuilder.getFrameworkSettings().build();
    createFrameworkSettings = settingsBuilder.createFrameworkSettings().build();
    updateFrameworkSettings = settingsBuilder.updateFrameworkSettings().build();
    deleteFrameworkSettings = settingsBuilder.deleteFrameworkSettings().build();
    listCloudControlsSettings = settingsBuilder.listCloudControlsSettings().build();
    getCloudControlSettings = settingsBuilder.getCloudControlSettings().build();
    createCloudControlSettings = settingsBuilder.createCloudControlSettings().build();
    updateCloudControlSettings = settingsBuilder.updateCloudControlSettings().build();
    deleteCloudControlSettings = settingsBuilder.deleteCloudControlSettings().build();
    listLocationsSettings = settingsBuilder.listLocationsSettings().build();
    getLocationSettings = settingsBuilder.getLocationSettings().build();
  }
  /** Builder for ConfigStubSettings. Note: this class is auto-generated (GAPIC); edits here are overwritten on regeneration. */
  public static class Builder extends StubSettings.Builder<ConfigStubSettings, Builder> {
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final PagedCallSettings.Builder<
            ListFrameworksRequest, ListFrameworksResponse, ListFrameworksPagedResponse>
        listFrameworksSettings;
    private final UnaryCallSettings.Builder<GetFrameworkRequest, Framework> getFrameworkSettings;
    private final UnaryCallSettings.Builder<CreateFrameworkRequest, Framework>
        createFrameworkSettings;
    private final UnaryCallSettings.Builder<UpdateFrameworkRequest, Framework>
        updateFrameworkSettings;
    private final UnaryCallSettings.Builder<DeleteFrameworkRequest, Empty> deleteFrameworkSettings;
    private final PagedCallSettings.Builder<
            ListCloudControlsRequest, ListCloudControlsResponse, ListCloudControlsPagedResponse>
        listCloudControlsSettings;
    private final UnaryCallSettings.Builder<GetCloudControlRequest, CloudControl>
        getCloudControlSettings;
    private final UnaryCallSettings.Builder<CreateCloudControlRequest, CloudControl>
        createCloudControlSettings;
    private final UnaryCallSettings.Builder<UpdateCloudControlRequest, CloudControl>
        updateCloudControlSettings;
    private final UnaryCallSettings.Builder<DeleteCloudControlRequest, Empty>
        deleteCloudControlSettings;
    private final PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings;
    private final UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings;
    // Retryable status codes keyed by retry-policy name; referenced by initDefaults() below.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;
    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
      definitions.put(
          "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }
    // Retry/timeout parameters keyed by retry-policy name; referenced by initDefaults() below.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;
    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(1000L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(10000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
              .setTotalTimeoutDuration(Duration.ofMillis(60000L))
              .build();
      definitions.put("no_retry_1_params", settings);
      settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build();
      definitions.put("no_retry_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }
    protected Builder() {
      this(((ClientContext) null));
    }
    protected Builder(ClientContext clientContext) {
      super(clientContext);
      listFrameworksSettings = PagedCallSettings.newBuilder(LIST_FRAMEWORKS_PAGE_STR_FACT);
      getFrameworkSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createFrameworkSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateFrameworkSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteFrameworkSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listCloudControlsSettings = PagedCallSettings.newBuilder(LIST_CLOUD_CONTROLS_PAGE_STR_FACT);
      getCloudControlSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      createCloudControlSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      updateCloudControlSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteCloudControlSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      listLocationsSettings = PagedCallSettings.newBuilder(LIST_LOCATIONS_PAGE_STR_FACT);
      getLocationSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listFrameworksSettings,
              getFrameworkSettings,
              createFrameworkSettings,
              updateFrameworkSettings,
              deleteFrameworkSettings,
              listCloudControlsSettings,
              getCloudControlSettings,
              createCloudControlSettings,
              updateCloudControlSettings,
              deleteCloudControlSettings,
              listLocationsSettings,
              getLocationSettings);
      initDefaults(this);
    }
    protected Builder(ConfigStubSettings settings) {
      super(settings);
      listFrameworksSettings = settings.listFrameworksSettings.toBuilder();
      getFrameworkSettings = settings.getFrameworkSettings.toBuilder();
      createFrameworkSettings = settings.createFrameworkSettings.toBuilder();
      updateFrameworkSettings = settings.updateFrameworkSettings.toBuilder();
      deleteFrameworkSettings = settings.deleteFrameworkSettings.toBuilder();
      listCloudControlsSettings = settings.listCloudControlsSettings.toBuilder();
      getCloudControlSettings = settings.getCloudControlSettings.toBuilder();
      createCloudControlSettings = settings.createCloudControlSettings.toBuilder();
      updateCloudControlSettings = settings.updateCloudControlSettings.toBuilder();
      deleteCloudControlSettings = settings.deleteCloudControlSettings.toBuilder();
      listLocationsSettings = settings.listLocationsSettings.toBuilder();
      getLocationSettings = settings.getLocationSettings.toBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              listFrameworksSettings,
              getFrameworkSettings,
              createFrameworkSettings,
              updateFrameworkSettings,
              deleteFrameworkSettings,
              listCloudControlsSettings,
              getCloudControlSettings,
              createCloudControlSettings,
              updateCloudControlSettings,
              deleteCloudControlSettings,
              listLocationsSettings,
              getLocationSettings);
    }
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }
    private static Builder createHttpJsonDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }
    // Applies the generated per-RPC retryable codes and retry parameters to the builder.
    private static Builder initDefaults(Builder builder) {
      builder
          .listFrameworksSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .getFrameworkSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .createFrameworkSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .updateFrameworkSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .deleteFrameworkSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .listCloudControlsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .getCloudControlSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
      builder
          .createCloudControlSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .updateCloudControlSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .deleteCloudControlSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
      builder
          .listLocationsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      builder
          .getLocationSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
      return builder;
    }
    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }
    /** Returns the per-method settings builders managed by this builder. */
    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }
    /** Returns the builder for the settings used for calls to listFrameworks. */
    public PagedCallSettings.Builder<
            ListFrameworksRequest, ListFrameworksResponse, ListFrameworksPagedResponse>
        listFrameworksSettings() {
      return listFrameworksSettings;
    }
    /** Returns the builder for the settings used for calls to getFramework. */
    public UnaryCallSettings.Builder<GetFrameworkRequest, Framework> getFrameworkSettings() {
      return getFrameworkSettings;
    }
    /** Returns the builder for the settings used for calls to createFramework. */
    public UnaryCallSettings.Builder<CreateFrameworkRequest, Framework> createFrameworkSettings() {
      return createFrameworkSettings;
    }
    /** Returns the builder for the settings used for calls to updateFramework. */
    public UnaryCallSettings.Builder<UpdateFrameworkRequest, Framework> updateFrameworkSettings() {
      return updateFrameworkSettings;
    }
    /** Returns the builder for the settings used for calls to deleteFramework. */
    public UnaryCallSettings.Builder<DeleteFrameworkRequest, Empty> deleteFrameworkSettings() {
      return deleteFrameworkSettings;
    }
    /** Returns the builder for the settings used for calls to listCloudControls. */
    public PagedCallSettings.Builder<
            ListCloudControlsRequest, ListCloudControlsResponse, ListCloudControlsPagedResponse>
        listCloudControlsSettings() {
      return listCloudControlsSettings;
    }
    /** Returns the builder for the settings used for calls to getCloudControl. */
    public UnaryCallSettings.Builder<GetCloudControlRequest, CloudControl>
        getCloudControlSettings() {
      return getCloudControlSettings;
    }
    /** Returns the builder for the settings used for calls to createCloudControl. */
    public UnaryCallSettings.Builder<CreateCloudControlRequest, CloudControl>
        createCloudControlSettings() {
      return createCloudControlSettings;
    }
    /** Returns the builder for the settings used for calls to updateCloudControl. */
    public UnaryCallSettings.Builder<UpdateCloudControlRequest, CloudControl>
        updateCloudControlSettings() {
      return updateCloudControlSettings;
    }
    /** Returns the builder for the settings used for calls to deleteCloudControl. */
    public UnaryCallSettings.Builder<DeleteCloudControlRequest, Empty>
        deleteCloudControlSettings() {
      return deleteCloudControlSettings;
    }
    /** Returns the builder for the settings used for calls to listLocations. */
    public PagedCallSettings.Builder<
            ListLocationsRequest, ListLocationsResponse, ListLocationsPagedResponse>
        listLocationsSettings() {
      return listLocationsSettings;
    }
    /** Returns the builder for the settings used for calls to getLocation. */
    public UnaryCallSettings.Builder<GetLocationRequest, Location> getLocationSettings() {
      return getLocationSettings;
    }
    @Override
    public ConfigStubSettings build() throws IOException {
      return new ConfigStubSettings(this);
    }
  }
}
|
apache/ignite | 37,127 | modules/calcite/src/main/java/org/apache/ignite/internal/processors/query/calcite/util/RexUtils.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.ignite.internal.processors.query.calcite.util;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.Comparator;
import java.util.EnumSet;
import java.util.HashMap;
import java.util.HashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.calcite.linq4j.Ord;
import org.apache.calcite.plan.RelOptCluster;
import org.apache.calcite.plan.RelOptPredicateList;
import org.apache.calcite.plan.RelOptUtil;
import org.apache.calcite.rel.RelCollation;
import org.apache.calcite.rel.RelFieldCollation;
import org.apache.calcite.rel.RelNode;
import org.apache.calcite.rel.core.CorrelationId;
import org.apache.calcite.rel.type.RelDataType;
import org.apache.calcite.rel.type.RelDataTypeField;
import org.apache.calcite.rex.RexBuilder;
import org.apache.calcite.rex.RexCall;
import org.apache.calcite.rex.RexCorrelVariable;
import org.apache.calcite.rex.RexDynamicParam;
import org.apache.calcite.rex.RexExecutor;
import org.apache.calcite.rex.RexFieldAccess;
import org.apache.calcite.rex.RexInputRef;
import org.apache.calcite.rex.RexLiteral;
import org.apache.calcite.rex.RexLocalRef;
import org.apache.calcite.rex.RexNode;
import org.apache.calcite.rex.RexProgram;
import org.apache.calcite.rex.RexShuttle;
import org.apache.calcite.rex.RexSimplify;
import org.apache.calcite.rex.RexSlot;
import org.apache.calcite.rex.RexUnknownAs;
import org.apache.calcite.rex.RexUtil;
import org.apache.calcite.rex.RexVisitor;
import org.apache.calcite.rex.RexVisitorImpl;
import org.apache.calcite.sql.SqlKind;
import org.apache.calcite.sql.SqlOperator;
import org.apache.calcite.sql.fun.SqlStdOperatorTable;
import org.apache.calcite.sql.type.SqlTypeName;
import org.apache.calcite.util.ControlFlowException;
import org.apache.calcite.util.ImmutableBitSet;
import org.apache.calcite.util.Litmus;
import org.apache.calcite.util.Pair;
import org.apache.calcite.util.Sarg;
import org.apache.calcite.util.Util;
import org.apache.calcite.util.mapping.MappingType;
import org.apache.calcite.util.mapping.Mappings;
import org.apache.ignite.internal.processors.query.calcite.prepare.bounds.ExactBounds;
import org.apache.ignite.internal.processors.query.calcite.prepare.bounds.MultiBounds;
import org.apache.ignite.internal.processors.query.calcite.prepare.bounds.RangeBounds;
import org.apache.ignite.internal.processors.query.calcite.prepare.bounds.SearchBounds;
import org.apache.ignite.internal.processors.query.calcite.sql.fun.IgniteOwnSqlOperatorTable;
import org.apache.ignite.internal.processors.query.calcite.trait.TraitUtils;
import org.apache.ignite.internal.util.typedef.F;
import org.apache.ignite.internal.util.typedef.internal.U;
import org.jetbrains.annotations.Nullable;
import static java.util.Objects.requireNonNull;
import static org.apache.calcite.rex.RexUtil.removeCast;
import static org.apache.calcite.rex.RexUtil.sargRef;
import static org.apache.calcite.sql.SqlKind.EQUALS;
import static org.apache.calcite.sql.SqlKind.GREATER_THAN;
import static org.apache.calcite.sql.SqlKind.GREATER_THAN_OR_EQUAL;
import static org.apache.calcite.sql.SqlKind.IS_NOT_DISTINCT_FROM;
import static org.apache.calcite.sql.SqlKind.IS_NOT_NULL;
import static org.apache.calcite.sql.SqlKind.IS_NULL;
import static org.apache.calcite.sql.SqlKind.LESS_THAN;
import static org.apache.calcite.sql.SqlKind.LESS_THAN_OR_EQUAL;
import static org.apache.calcite.sql.SqlKind.NOT;
import static org.apache.calcite.sql.SqlKind.OR;
import static org.apache.calcite.sql.SqlKind.SEARCH;
/** */
public class RexUtils {
    /** Maximum amount of search bounds tuples per scan; expansion of OR/Sarg predicates beyond this limit is rejected. */
    public static final int MAX_SEARCH_BOUNDS_COMPLEXITY = 100;
/** */
public static RexNode makeCast(RexBuilder builder, RexNode node, RelDataType type) {
return TypeUtils.needCast(builder.getTypeFactory(), node.getType(), type) ? builder.makeCast(type, node) : node;
}
    /** Returns the {@link RexBuilder} of the given relational node's cluster. */
    public static RexBuilder builder(RelNode rel) {
        return builder(rel.getCluster());
    }
    /** Returns the {@link RexBuilder} of the given cluster. */
    public static RexBuilder builder(RelOptCluster cluster) {
        return cluster.getRexBuilder();
    }
    /** Returns the {@link RexExecutor} for the given relational node's cluster. */
    public static RexExecutor executor(RelNode rel) {
        return executor(rel.getCluster());
    }
    /** Returns the planner's executor if one is set, otherwise the default {@link RexUtil#EXECUTOR}. */
    public static RexExecutor executor(RelOptCluster cluster) {
        return Util.first(cluster.getPlanner().getExecutor(), RexUtil.EXECUTOR);
    }
    /** Creates a {@link RexSimplify} for the given cluster with an empty predicate list. */
    public static RexSimplify simplifier(RelOptCluster cluster) {
        return new RexSimplify(builder(cluster), RelOptPredicateList.EMPTY, executor(cluster));
    }
/** */
public static RexNode makeCase(RexBuilder builder, RexNode... operands) {
if (U.assertionsEnabled()) {
// each odd operand except last one has to return a boolean type
for (int i = 0; i < operands.length; i += 2) {
if (operands[i].getType().getSqlTypeName() != SqlTypeName.BOOLEAN && i < operands.length - 1) {
throw new AssertionError("Unexpected operand type. " +
"[operands=" + Arrays.toString(operands) + "]");
}
}
}
return builder.makeCall(SqlStdOperatorTable.CASE, operands);
}
    /** Returns whether a list of expressions projects the incoming fields (checked against {@link RexInputRef}s). */
    public static boolean isIdentity(List<? extends RexNode> projects, RelDataType inputRowType) {
        return isIdentity(projects, inputRowType, false);
    }
/** Returns whether a list of expressions projects the incoming fields. */
public static boolean isIdentity(List<? extends RexNode> projects, RelDataType inputRowType, boolean local) {
if (inputRowType.getFieldCount() != projects.size())
return false;
final List<RelDataTypeField> fields = inputRowType.getFieldList();
Class<? extends RexSlot> clazz = local ? RexLocalRef.class : RexInputRef.class;
for (int i = 0; i < fields.size(); i++) {
if (!clazz.isInstance(projects.get(i)))
return false;
RexSlot ref = (RexSlot)projects.get(i);
if (ref.getIndex() != i)
return false;
if (!RelOptUtil.eq("t1", projects.get(i).getType(), "t2", fields.get(i).getType(), Litmus.IGNORE))
return false;
}
return true;
}
    /** Binary comparison operations (used by isBinaryComparison to detect value-producing predicates). */
    private static final Set<SqlKind> BINARY_COMPARISON =
        EnumSet.of(EQUALS, IS_NOT_DISTINCT_FROM, LESS_THAN, GREATER_THAN, GREATER_THAN_OR_EQUAL, LESS_THAN_OR_EQUAL);
    /** Supported index operations (predicate kinds usable for tree-index bounds). */
    private static final Set<SqlKind> TREE_INDEX_COMPARISON =
        EnumSet.of(
            SEARCH,
            IS_NULL,
            IS_NOT_NULL,
            EQUALS,
            IS_NOT_DISTINCT_FROM,
            LESS_THAN, GREATER_THAN,
            GREATER_THAN_OR_EQUAL, LESS_THAN_OR_EQUAL);
    /**
     * Builds sorted (tree) index search bounds for the given condition.
     *
     * @param cluster Cluster.
     * @param collation Index collation; if {@code null} or default, a collation is derived from
     *      the condition (equality-constrained fields first).
     * @param condition Filter condition; converted to CNF here.
     * @param rowType Row type of the scanned relation.
     * @param requiredColumns Participating columns, or {@code null} when all columns are used.
     * @return Search bounds (indexed by source field), or {@code null} if no usable bounds found.
     */
    public static List<SearchBounds> buildSortedSearchBounds(
        RelOptCluster cluster,
        RelCollation collation,
        RexNode condition,
        RelDataType rowType,
        ImmutableBitSet requiredColumns
    ) {
        if (condition == null)
            return null;
        condition = RexUtil.toCnf(builder(cluster), condition);
        Map<Integer, List<RexCall>> fieldsToPredicates = mapPredicatesToFields(condition, cluster);
        if (F.isEmpty(fieldsToPredicates))
            return null;
        // Force collation for all fields of the condition.
        if (collation == null || collation.isDefault()) {
            List<Integer> equalsFields = new ArrayList<>(fieldsToPredicates.size());
            List<Integer> otherFields = new ArrayList<>(fieldsToPredicates.size());
            // It's more effective to put equality conditions in the collation first.
            fieldsToPredicates.forEach((idx, conds) ->
                (F.exist(conds, call -> call.getOperator().getKind() == EQUALS) ? equalsFields : otherFields).add(idx));
            collation = TraitUtils.createCollation(F.concat(true, equalsFields, otherFields));
        }
        List<RelDataType> types = RelOptUtil.getFieldTypeList(rowType);
        Mappings.TargetMapping mapping = null;
        if (requiredColumns != null)
            mapping = Commons.inverseMapping(requiredColumns, types.size());
        // Fixed-size, null-filled list: unset positions stay null.
        List<SearchBounds> bounds = Arrays.asList(new SearchBounds[types.size()]);
        boolean boundsEmpty = true;
        int prevComplexity = 1;
        for (int i = 0; i < collation.getFieldCollations().size(); i++) {
            RelFieldCollation fc = collation.getFieldCollations().get(i);
            int collFldIdx = fc.getFieldIndex();
            List<RexCall> collFldPreds = fieldsToPredicates.get(collFldIdx);
            // Bounds must cover a contiguous prefix of the collation, so stop at the first
            // unconstrained field.
            if (F.isEmpty(collFldPreds))
                break;
            if (mapping != null)
                collFldIdx = mapping.getSourceOpt(collFldIdx);
            SearchBounds fldBounds = createBounds(fc, collFldPreds, cluster, types.get(collFldIdx), prevComplexity);
            if (fldBounds == null)
                break;
            boundsEmpty = false;
            bounds.set(collFldIdx, fldBounds);
            if (fldBounds instanceof MultiBounds) {
                prevComplexity *= ((MultiBounds)fldBounds).bounds().size();
                // Any bounds after multi range bounds are not allowed, since it can cause intervals intersection.
                if (((MultiBounds)fldBounds).bounds().stream().anyMatch(b -> b.type() != SearchBounds.Type.EXACT))
                    break;
            }
            if (fldBounds.type() == SearchBounds.Type.RANGE)
                break; // TODO https://issues.apache.org/jira/browse/IGNITE-13568
        }
        return boundsEmpty ? null : bounds;
    }
/**
* Builds hash index search bounds.
*/
public static List<SearchBounds> buildHashSearchBounds(
RelOptCluster cluster,
RexNode condition,
RelDataType rowType,
ImmutableBitSet requiredColumns,
boolean ignoreNotEqualPreds
) {
condition = RexUtil.toCnf(builder(cluster), condition);
Map<Integer, List<RexCall>> fieldsToPredicates = mapPredicatesToFields(condition, cluster);
if (F.isEmpty(fieldsToPredicates))
return null;
List<SearchBounds> bounds = null;
List<RelDataType> types = RelOptUtil.getFieldTypeList(rowType);
Mappings.TargetMapping mapping = null;
if (requiredColumns != null)
mapping = Commons.inverseMapping(requiredColumns, types.size());
for (int fldIdx : fieldsToPredicates.keySet()) {
List<RexCall> collFldPreds = fieldsToPredicates.get(fldIdx);
if (F.isEmpty(collFldPreds))
break;
for (RexCall pred : collFldPreds) {
if (pred.getOperator().kind != SqlKind.EQUALS && pred.getOperator().kind != IS_NOT_DISTINCT_FROM) {
if (ignoreNotEqualPreds)
continue;
else // Only EQUALS predicates allowed in condition.
return null;
}
if (bounds == null)
bounds = Arrays.asList(new SearchBounds[types.size()]);
if (mapping != null)
fldIdx = mapping.getSourceOpt(fldIdx);
bounds.set(fldIdx, new ExactBounds(pred,
makeCast(builder(cluster), removeCast(pred.operands.get(1)), types.get(fldIdx))));
}
}
return bounds;
}
    /**
     * Creates an index search bound from all predicates referencing a single collation field.
     * Equality-like predicates (EQUALS/IS NOT DISTINCT FROM/IS NULL) short-circuit into an
     * {@link ExactBounds}; OR and SEARCH predicates may expand into {@link MultiBounds};
     * remaining comparisons are merged into a single {@link RangeBounds}.
     */
    private static SearchBounds createBounds(
        @Nullable RelFieldCollation fc, // Can be null for EQUALS condition.
        List<RexCall> collFldPreds,
        RelOptCluster cluster,
        RelDataType fldType,
        int prevComplexity
    ) {
        RexBuilder builder = builder(cluster);
        // Placeholder call used to represent NULL values inside index bounds.
        RexNode nullBound = builder.makeCall(IgniteOwnSqlOperatorTable.NULL_BOUND);
        RexNode upperCond = null;
        RexNode lowerCond = null;
        RexNode upperBound = null;
        RexNode lowerBound = null;
        boolean upperInclude = true;
        boolean lowerInclude = true;
        // Give priority to equality operators.
        collFldPreds.sort(Comparator.comparingInt(pred -> {
            switch (pred.getOperator().getKind()) {
                case EQUALS:
                case IS_NOT_DISTINCT_FROM:
                case IS_NULL:
                    return 0;
                default:
                    return 1;
            }
        }));
        for (RexCall pred : collFldPreds) {
            RexNode val = null;
            RexNode ref = pred.getOperands().get(0);
            if (isBinaryComparison(pred)) {
                val = removeCast(pred.operands.get(1));
                assert idxOpSupports(val) : val;
                val = makeCast(builder, val, fldType);
            }
            SqlOperator op = pred.getOperator();
            // Equality-like predicates short-circuit: they fully pin the field's value.
            if (op.kind == EQUALS)
                return new ExactBounds(pred, val);
            if (op.kind == IS_NOT_DISTINCT_FROM)
                return new ExactBounds(pred, builder.makeCall(SqlStdOperatorTable.COALESCE, val, nullBound));
            else if (op.kind == IS_NULL)
                return new ExactBounds(pred, nullBound);
            else if (op.kind == OR) {
                // Expand each OR branch recursively, flattening nested MultiBounds and
                // tracking overall complexity against MAX_SEARCH_BOUNDS_COMPLEXITY.
                List<SearchBounds> orBounds = new ArrayList<>();
                int curComplexity = 0;
                for (RexNode operand : pred.getOperands()) {
                    SearchBounds opBounds = createBounds(fc, Collections.singletonList((RexCall)operand),
                        cluster, fldType, prevComplexity);
                    if (opBounds instanceof MultiBounds) {
                        curComplexity += ((MultiBounds)opBounds).bounds().size();
                        orBounds.addAll(((MultiBounds)opBounds).bounds());
                    }
                    else if (opBounds != null) {
                        curComplexity++;
                        orBounds.add(opBounds);
                    }
                    if (opBounds == null || curComplexity > MAX_SEARCH_BOUNDS_COMPLEXITY) {
                        orBounds = null;
                        break;
                    }
                }
                // Unusable OR: fall through to the next predicate for this field.
                if (orBounds == null)
                    continue;
                return new MultiBounds(pred, orBounds);
            }
            else if (op.kind == SEARCH) {
                Sarg<?> sarg = ((RexLiteral)pred.operands.get(1)).getValueAs(Sarg.class);
                List<SearchBounds> bounds = expandSargToBounds(fc, cluster, fldType, prevComplexity, sarg, ref);
                if (bounds == null)
                    continue;
                if (bounds.size() == 1) {
                    if (bounds.get(0) instanceof RangeBounds && collFldPreds.size() > 1) {
                        // Try to merge bounds.
                        // NOTE(review): 'fc' is dereferenced here; assumes SEARCH predicates never
                        // arrive with a null 'fc' (null is documented only for EQUALS) — confirm.
                        boolean ascDir = !fc.getDirection().isDescending();
                        RangeBounds rangeBounds = (RangeBounds)bounds.get(0);
                        if (rangeBounds.lowerBound() != null) {
                            if (lowerBound != null && lowerBound != nullBound) {
                                lowerBound = leastOrGreatest(builder, !ascDir, lowerBound, rangeBounds.lowerBound());
                                lowerInclude |= rangeBounds.lowerInclude();
                            }
                            else {
                                lowerBound = rangeBounds.lowerBound();
                                lowerInclude = rangeBounds.lowerInclude();
                            }
                            lowerCond = lessOrGreater(builder, !ascDir, lowerInclude, ref, lowerBound);
                        }
                        if (rangeBounds.upperBound() != null) {
                            if (upperBound != null && upperBound != nullBound) {
                                upperBound = leastOrGreatest(builder, ascDir, upperBound, rangeBounds.upperBound());
                                upperInclude |= rangeBounds.upperInclude();
                            }
                            else {
                                upperBound = rangeBounds.upperBound();
                                upperInclude = rangeBounds.upperInclude();
                            }
                            upperCond = lessOrGreater(builder, ascDir, upperInclude, ref, upperBound);
                        }
                        continue;
                    }
                    else
                        return bounds.get(0);
                }
                return new MultiBounds(pred, bounds);
            }
            // Range bounds.
            boolean lowerBoundBelow = !fc.getDirection().isDescending();
            boolean includeBound = op.kind == GREATER_THAN_OR_EQUAL || op.kind == LESS_THAN_OR_EQUAL;
            boolean lessCondition = false;
            switch (op.kind) {
                case LESS_THAN:
                case LESS_THAN_OR_EQUAL:
                    lessCondition = true;
                    lowerBoundBelow = !lowerBoundBelow;
                    // Fall through.
                case GREATER_THAN:
                case GREATER_THAN_OR_EQUAL:
                    if (lowerBoundBelow) {
                        if (lowerBound == null || lowerBound == nullBound) {
                            lowerCond = pred;
                            lowerBound = val;
                            lowerInclude = includeBound;
                        }
                        else {
                            // Merge with a previously seen bound on the same side.
                            lowerBound = leastOrGreatest(builder, lessCondition, lowerBound, val);
                            lowerInclude |= includeBound;
                            lowerCond = lessOrGreater(builder, lessCondition, lowerInclude, ref, lowerBound);
                        }
                    }
                    else {
                        if (upperBound == null || upperBound == nullBound) {
                            upperCond = pred;
                            upperBound = val;
                            upperInclude = includeBound;
                        }
                        else {
                            upperBound = leastOrGreatest(builder, lessCondition, upperBound, val);
                            upperInclude |= includeBound;
                            upperCond = lessOrGreater(builder, lessCondition, upperInclude, ref, upperBound);
                        }
                    }
                    // Fall through.
                case IS_NOT_NULL:
                    // Deliberate fall-through: comparison operators also imply NOT NULL, so the
                    // NULLs-sorting side gets excluded via the nullBound sentinel.
                    if (fc.nullDirection == RelFieldCollation.NullDirection.FIRST && lowerBound == null) {
                        lowerCond = pred;
                        lowerBound = nullBound;
                        lowerInclude = false;
                    }
                    else if (fc.nullDirection == RelFieldCollation.NullDirection.LAST && upperBound == null) {
                        upperCond = pred;
                        upperBound = nullBound;
                        upperInclude = false;
                    }
                    break;
                default:
                    throw new AssertionError("Unknown condition: " + op.kind);
            }
        }
        if (lowerBound == null && upperBound == null)
            return null; // No bounds.
        // Found upper bound, lower bound or both.
        RexNode cond = lowerCond == null ? upperCond :
            upperCond == null ? lowerCond :
            upperCond == lowerCond ? lowerCond : builder.makeCall(SqlStdOperatorTable.AND, lowerCond, upperCond);
        return new RangeBounds(cond, lowerBound, upperBound, lowerInclude, upperInclude);
    }
    /**
     * Expands a SARG (search argument) on a single indexed field into a list of per-disjunct
     * search bounds.
     *
     * The SARG is first materialized as a predicate, converted to DNF, and each disjunct is
     * turned into bounds via {@code createBounds}.
     *
     * @param fc Collation of the indexed field (direction/null ordering).
     * @param cluster Planner cluster (provides the RexBuilder).
     * @param fldType Type of the indexed field.
     * @param prevComplexity Accumulated complexity of bounds built for preceding fields.
     * @param sarg Search argument to expand.
     * @param ref Reference to the indexed field.
     * @return Bounds for each disjunct, or {@code null} if the complexity limit is exceeded
     *         or some disjunct contains a predicate that cannot be used for index filtering.
     */
    private static List<SearchBounds> expandSargToBounds(
        RelFieldCollation fc,
        RelOptCluster cluster,
        RelDataType fldType,
        int prevComplexity,
        Sarg<?> sarg,
        RexNode ref
    ) {
        // Complexity is multiplicative across fields: each disjunct of this SARG combines
        // with every bounds tuple already produced for preceding index fields.
        int complexity = prevComplexity * sarg.complexity();

        // Limit amount of search bounds tuples.
        if (complexity > MAX_SEARCH_BOUNDS_COMPLEXITY)
            return null;

        RexBuilder builder = builder(cluster);

        RexNode sargCond = sargRef(builder, ref, sarg, fldType, RexUnknownAs.UNKNOWN);
        List<RexNode> disjunctions = RelOptUtil.disjunctions(RexUtil.toDnf(builder, sargCond));

        List<SearchBounds> bounds = new ArrayList<>(disjunctions.size());

        for (RexNode bound : disjunctions) {
            List<RexNode> conjunctions = RelOptUtil.conjunctions(bound);
            List<RexCall> calls = new ArrayList<>(conjunctions.size());

            for (RexNode rexNode : conjunctions) {
                if (isSupportedTreeComparison(rexNode))
                    calls.add((RexCall)rexNode);
                else // Cannot filter using this predicate (NOT_EQUALS for example), give a chance to other predicates.
                    return null;
            }

            bounds.add(createBounds(fc, calls, cluster, fldType, complexity));
        }

        return bounds;
    }
/** */
private static RexNode leastOrGreatest(RexBuilder builder, boolean least, RexNode arg0, RexNode arg1) {
return builder.makeCall(
least ? IgniteOwnSqlOperatorTable.LEAST2 : IgniteOwnSqlOperatorTable.GREATEST2,
arg0,
arg1
);
}
/** */
private static RexNode lessOrGreater(
RexBuilder builder,
boolean less,
boolean includeBound,
RexNode arg0,
RexNode arg1
) {
return builder.makeCall(less ?
(includeBound ? SqlStdOperatorTable.LESS_THAN_OR_EQUAL : SqlStdOperatorTable.LESS_THAN) :
(includeBound ? SqlStdOperatorTable.GREATER_THAN_OR_EQUAL : SqlStdOperatorTable.GREATER_THAN),
arg0, arg1);
}
    /**
     * Groups the conjuncts of {@code condition} by the index of the field they constrain.
     *
     * Each top-level conjunct is either a plain comparison on a single field reference, or an
     * OR whose operands all constrain the same field reference; any other conjunct is skipped.
     *
     * @param condition Filter condition to analyze.
     * @param cluster Planner cluster (provides the RexBuilder).
     * @return Map from field index to the predicates constraining that field.
     */
    private static Map<Integer, List<RexCall>> mapPredicatesToFields(RexNode condition, RelOptCluster cluster) {
        List<RexNode> conjunctions = RelOptUtil.conjunctions(condition);

        Map<Integer, List<RexCall>> res = new HashMap<>(conjunctions.size());

        for (RexNode rexNode : conjunctions) {
            Pair<Integer, RexCall> refPredicate = null;

            if (rexNode instanceof RexCall && rexNode.getKind() == OR) {
                List<RexNode> operands = ((RexCall)rexNode).getOperands();

                Integer ref = null;
                List<RexCall> preds = new ArrayList<>(operands.size());

                for (RexNode operand : operands) {
                    Pair<Integer, RexCall> operandRefPredicate = extractRefPredicate(operand, cluster);

                    // Skip the whole OR condition if any operand does not support tree comparison or not on reference.
                    if (operandRefPredicate == null) {
                        ref = null;

                        break;
                    }

                    // Ensure that we have the same field reference in all operands.
                    if (ref == null)
                        ref = operandRefPredicate.getKey();
                    else if (!ref.equals(operandRefPredicate.getKey())) {
                        ref = null;

                        break;
                    }

                    // For correlated variables it's required to resort and merge ranges on each nested loop,
                    // don't support it now.
                    if (containsFieldAccess(operandRefPredicate.getValue())) {
                        ref = null;

                        break;
                    }

                    preds.add(operandRefPredicate.getValue());
                }

                // All operands constrain the same field: rebuild the OR over the normalized predicates.
                if (ref != null)
                    refPredicate = Pair.of(ref, (RexCall)builder(cluster).makeCall(((RexCall)rexNode).getOperator(), preds));
            }
            else
                refPredicate = extractRefPredicate(rexNode, cluster);

            if (refPredicate != null) {
                List<RexCall> fldPreds = res.computeIfAbsent(refPredicate.getKey(), k -> new ArrayList<>(conjunctions.size()));

                fldPreds.add(refPredicate.getValue());
            }
        }

        return res;
    }
    /**
     * Normalizes a predicate on a single field reference.
     *
     * Bare boolean fields are first rewritten into explicit comparisons; for binary
     * comparisons the call is inverted if necessary so that the field reference ends up
     * on the left side.
     *
     * @param rexNode Predicate to analyze.
     * @param cluster Planner cluster (provides the RexBuilder).
     * @return Pair of (field index, normalized predicate), or {@code null} if the predicate
     *         is not a supported tree comparison on a field reference.
     */
    private static Pair<Integer, RexCall> extractRefPredicate(RexNode rexNode, RelOptCluster cluster) {
        rexNode = expandBooleanFieldComparison(rexNode, builder(cluster));

        if (!isSupportedTreeComparison(rexNode))
            return null;

        RexCall predCall = (RexCall)rexNode;
        RexSlot ref;

        if (isBinaryComparison(rexNode)) {
            ref = (RexSlot)extractRefFromBinary(predCall, cluster);

            if (ref == null)
                return null;

            // Let RexLocalRef be on the left side.
            if (refOnTheRight(predCall))
                predCall = (RexCall)invert(builder(cluster), predCall);
        }
        else {
            // Unary comparison (e.g. IS NULL): the reference must be the first operand.
            ref = (RexSlot)extractRefFromOperand(predCall, cluster, 0);

            if (ref == null)
                return null;
        }

        return Pair.of(ref.getIndex(), predCall);
    }
/** Extended version of RexUtil.invert with additional operators support. */
private static RexNode invert(RexBuilder rexBuilder, RexCall call) {
if (call.getOperator() == SqlStdOperatorTable.IS_NOT_DISTINCT_FROM)
return rexBuilder.makeCall(call.getOperator(), call.getOperands().get(1), call.getOperands().get(0));
else
return RexUtil.invert(rexBuilder, call);
}
/** */
private static RexNode expandBooleanFieldComparison(RexNode rexNode, RexBuilder builder) {
if (rexNode instanceof RexSlot)
return builder.makeCall(SqlStdOperatorTable.EQUALS, rexNode, builder.makeLiteral(true));
else if (rexNode instanceof RexCall && rexNode.getKind() == NOT &&
((RexCall)rexNode).getOperands().get(0) instanceof RexSlot) {
return builder.makeCall(SqlStdOperatorTable.EQUALS, ((RexCall)rexNode).getOperands().get(0),
builder.makeLiteral(false));
}
return rexNode;
}
/** */
private static RexNode extractRefFromBinary(RexCall call, RelOptCluster cluster) {
assert isBinaryComparison(call);
RexNode leftRef = extractRefFromOperand(call, cluster, 0);
RexNode rightOp = call.getOperands().get(1);
if (leftRef != null)
return idxOpSupports(removeCast(rightOp)) ? leftRef : null;
RexNode rightRef = extractRefFromOperand(call, cluster, 1);
RexNode leftOp = call.getOperands().get(0);
if (rightRef != null)
return idxOpSupports(removeCast(leftOp)) ? rightRef : null;
return null;
}
    /**
     * Extracts a field reference from the given operand of a comparison call.
     *
     * A cast around the reference is tolerated only when the cast does not change
     * comparison semantics (i.e. it was redundant).
     *
     * @param call Comparison call.
     * @param cluster Planner cluster (provides the type factory).
     * @param operandNum Index of the operand to inspect.
     * @return The reference, or {@code null} if the operand is not a usable reference.
     */
    private static RexNode extractRefFromOperand(RexCall call, RelOptCluster cluster, int operandNum) {
        assert isSupportedTreeComparison(call);

        RexNode op = call.getOperands().get(operandNum);

        op = removeCast(op);

        // Can proceed without ref cast only if cast was redundant in terms of values comparison.
        if ((op instanceof RexSlot) &&
            !TypeUtils.needCast(cluster.getTypeFactory(), op.getType(), call.getOperands().get(operandNum).getType()))
            return op;

        return null;
    }
/** */
private static boolean refOnTheRight(RexCall predCall) {
RexNode rightOp = predCall.getOperands().get(1);
rightOp = removeCast(rightOp);
return rightOp.isA(SqlKind.LOCAL_REF) || rightOp.isA(SqlKind.INPUT_REF);
}
/** */
private static boolean isBinaryComparison(RexNode exp) {
return BINARY_COMPARISON.contains(exp.getKind()) &&
(exp instanceof RexCall) &&
((RexCall)exp).getOperands().size() == 2;
}
/** */
private static boolean isSupportedTreeComparison(RexNode exp) {
return TREE_INDEX_COMPARISON.contains(exp.getKind()) &&
(exp instanceof RexCall);
}
/** */
private static boolean idxOpSupports(RexNode op) {
return op instanceof RexLiteral
|| op instanceof RexDynamicParam
|| op instanceof RexFieldAccess
|| !containsRef(op);
}
/** */
public static boolean isNotNull(RexNode op) {
if (op == null)
return false;
return !(op instanceof RexLiteral) || !((RexLiteral)op).isNull();
}
    /**
     * Builds an inverse mapping from projection expressions back to input fields.
     *
     * Only plain references (local or input, depending on {@code local}) contribute to the
     * mapping; computed expressions are left unmapped.
     *
     * @param nodes Projection expressions.
     * @param inputRowType Row type of the projection input.
     * @param local If {@code true}, {@link RexLocalRef}s are mapped, otherwise {@link RexInputRef}s.
     * @return Mapping from projection position to referenced input field index.
     */
    public static Mappings.TargetMapping inversePermutation(List<RexNode> nodes, RelDataType inputRowType, boolean local) {
        final Mappings.TargetMapping mapping =
            Mappings.create(MappingType.INVERSE_FUNCTION, nodes.size(), inputRowType.getFieldCount());

        Class<? extends RexSlot> clazz = local ? RexLocalRef.class : RexInputRef.class;

        for (Ord<RexNode> node : Ord.zip(nodes)) {
            if (clazz.isInstance(node.e))
                mapping.set(node.i, ((RexSlot)node.e).getIndex());
        }

        return mapping;
    }
    /** Replaces all {@link RexLocalRef}s in the given expressions with equivalent {@link RexInputRef}s. */
    public static List<RexNode> replaceLocalRefs(List<RexNode> nodes) {
        return LocalRefReplacer.INSTANCE.apply(nodes);
    }
    /** Replaces all {@link RexInputRef}s in the given expressions with equivalent {@link RexLocalRef}s. */
    public static List<RexNode> replaceInputRefs(List<RexNode> nodes) {
        return InputRefReplacer.INSTANCE.apply(nodes);
    }
    /** Replaces all {@link RexLocalRef}s in the given expression with equivalent {@link RexInputRef}s. */
    public static RexNode replaceLocalRefs(RexNode node) {
        return LocalRefReplacer.INSTANCE.apply(node);
    }
    /** Replaces all {@link RexInputRef}s in the given expression with equivalent {@link RexLocalRef}s. */
    public static RexNode replaceInputRefs(RexNode node) {
        return InputRefReplacer.INSTANCE.apply(node);
    }
    /** @return {@code true} if the expression contains a correlation variable. */
    public static boolean hasCorrelation(RexNode node) {
        return hasCorrelation(Collections.singletonList(node));
    }
/** */
public static boolean hasCorrelation(List<RexNode> nodes) {
try {
RexVisitor<Void> v = new RexVisitorImpl<Void>(true) {
@Override public Void visitCorrelVariable(RexCorrelVariable correlVariable) {
throw new ControlFlowException();
}
};
nodes.forEach(n -> n.accept(v));
return false;
}
catch (ControlFlowException e) {
return true;
}
}
/** */
public static Set<CorrelationId> extractCorrelationIds(RexNode node) {
if (node == null)
return Collections.emptySet();
return extractCorrelationIds(Collections.singletonList(node));
}
/** */
public static Set<Integer> notNullKeys(List<RexNode> row) {
if (F.isEmpty(row))
return Collections.emptySet();
Set<Integer> keys = new HashSet<>();
for (int i = 0; i < row.size(); ++i ) {
if (isNotNull(row.get(i)))
keys.add(i);
}
return keys;
}
    /**
     * Extracts a double value from a literal expression.
     *
     * @param n Expression to read.
     * @param def Default returned when the expression is not a literal or the value
     *            cannot be extracted.
     * @return Double value of the literal expression, or {@code def}.
     */
    public static double doubleFromRex(RexNode n, double def) {
        try {
            if (n.isA(SqlKind.LITERAL))
                return ((RexLiteral)n).getValueAs(Double.class);
            else
                return def;
        }
        catch (Exception e) {
            // Extraction failure indicates a programming error; surface it under -ea,
            // fall back to the default otherwise.
            assert false : "Unable to extract value: " + e.getMessage();

            return def;
        }
    }
/** */
public static Set<CorrelationId> extractCorrelationIds(List<RexNode> nodes) {
final Set<CorrelationId> cors = new HashSet<>();
RexVisitor<Void> v = new RexVisitorImpl<Void>(true) {
@Override public Void visitCorrelVariable(RexCorrelVariable correlVariable) {
cors.add(correlVariable.id);
return null;
}
};
nodes.forEach(rex -> rex.accept(v));
return cors;
}
    /**
     * Expand SEARCH/SARG operator with preceding NULLs check for nullable input operator.
     * This allows to eliminate NULL checks for every argument of SARG and reduces generated bytecode.
     *
     * Result shape for a nullable operand:
     * {@code CASE WHEN op0 IS NULL THEN <nullAs> ELSE <expanded search over NOT-NULL op0> END}.
     *
     * @param rexBuilder Rex builder.
     * @param program Program used to dereference local refs (may be {@code null} if the
     *                SARG operand is already a literal).
     * @param call SEARCH call to expand.
     * @return Expanded expression.
     */
    public static RexNode expandSearchNullable(RexBuilder rexBuilder, @Nullable RexProgram program, RexCall call) {
        assert call.getOperator() == SqlStdOperatorTable.SEARCH : "Unexpected operator: " + call.getOperator();

        RexNode op0 = call.getOperands().get(0);
        RexNode op1 = call.getOperands().get(1);

        // Non-nullable operand: the plain Calcite expansion is already free of NULL checks.
        if (!op0.getType().isNullable())
            return RexUtil.expandSearch(rexBuilder, program, call);

        while (op1 instanceof RexLocalRef) // Dereference local variable.
            op1 = requireNonNull(program, "program").getExprList().get(((RexSlot)op1).getIndex());

        assert op1 instanceof RexLiteral : "Unexpected operand class: " + op1;

        Sarg<?> arg = ((RexLiteral)op1).getValueAs(Sarg.class);

        // Value the whole SEARCH evaluates to when the operand is NULL.
        RexNode nullAs = arg.nullAs == RexUnknownAs.TRUE || arg.nullAs == RexUnknownAs.FALSE
            ? rexBuilder.makeLiteral(arg.nullAs.toBoolean())
            : rexBuilder.makeNullLiteral(rexBuilder.getTypeFactory().createSqlType(SqlTypeName.BOOLEAN));

        // Expand the search over a NOT-NULL view of the operand so the expansion itself
        // contains no per-argument NULL checks.
        RexNode expandedSearch = RexUtil.expandSearch(rexBuilder, program,
            rexBuilder.makeCall(call.getOperator(), rexBuilder.makeNotNull(op0), op1));

        return rexBuilder.makeCall(SqlStdOperatorTable.CASE,
            rexBuilder.makeCall(SqlStdOperatorTable.IS_NULL, op0),
            nullAs,
            expandedSearch
        );
    }
    /**
     * Traverse {@code node} and expand all SEARCH/SARG operators (with preceding NULLs check).
     *
     * @param rexBuilder Rex builder.
     * @param program Program used to dereference local refs (may be {@code null}).
     * @param node Expression to traverse.
     * @return Expression with all SEARCH calls expanded.
     * @see #expandSearchNullable
     */
    public static RexNode expandSearchNullableRecursive(RexBuilder rexBuilder, @Nullable RexProgram program, RexNode node) {
        return node.accept(new SearchExpandingShuttle(rexBuilder, program));
    }
/** */
private static Boolean containsRef(RexNode node) {
RexVisitor<Void> v = new RexVisitorImpl<Void>(true) {
@Override public Void visitInputRef(RexInputRef inputRef) {
throw Util.FoundOne.NULL;
}
@Override public Void visitLocalRef(RexLocalRef locRef) {
throw Util.FoundOne.NULL;
}
};
try {
node.accept(v);
return false;
}
catch (Util.FoundOne e) {
return true;
}
}
/** */
private static Boolean containsFieldAccess(RexNode node) {
RexVisitor<Void> v = new RexVisitorImpl<Void>(true) {
@Override public Void visitFieldAccess(RexFieldAccess fieldAccess) {
throw Util.FoundOne.NULL;
}
};
try {
node.accept(v);
return false;
}
catch (Util.FoundOne e) {
return true;
}
}
/** Visitor for replacing scan local refs to input refs. */
private static class LocalRefReplacer extends RexShuttle {
/** */
private static final RexShuttle INSTANCE = new LocalRefReplacer();
/** {@inheritDoc} */
@Override public RexNode visitLocalRef(RexLocalRef inputRef) {
return new RexInputRef(inputRef.getIndex(), inputRef.getType());
}
}
    /** Visitor for replacing input refs to local refs. We need it for proper plan serialization. */
    private static class InputRefReplacer extends RexShuttle {
        /** Shared stateless instance. */
        private static final RexShuttle INSTANCE = new InputRefReplacer();

        /** {@inheritDoc} */
        @Override public RexNode visitInputRef(RexInputRef inputRef) {
            return new RexLocalRef(inputRef.getIndex(), inputRef.getType());
        }
    }
    /** Shuttle that expands every SEARCH call in a tree via {@link #expandSearchNullable}. */
    private static class SearchExpandingShuttle extends RexShuttle {
        /** Rex builder used for the expansion. */
        private final RexBuilder rexBuilder;

        /** Program used to dereference local refs; may be {@code null}. */
        private final @Nullable RexProgram program;

        /**
         * @param rexBuilder Rex builder.
         * @param program Program used to dereference local refs (may be {@code null}).
         */
        public SearchExpandingShuttle(RexBuilder rexBuilder, @Nullable RexProgram program) {
            this.rexBuilder = rexBuilder;
            this.program = program;
        }

        /** {@inheritDoc} */
        @Override public RexNode visitCall(RexCall call) {
            if (call.getOperator() == SqlStdOperatorTable.SEARCH)
                return expandSearchNullable(rexBuilder, program, call);

            return super.visitCall(call);
        }
    }
}
|
apache/directory-studio | 37,072 | plugins/schemaeditor/src/main/java/org/apache/directory/studio/schemaeditor/view/views/SearchView.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*
*/
package org.apache.directory.studio.schemaeditor.view.views;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.regex.Pattern;
import org.apache.directory.api.ldap.model.schema.AttributeType;
import org.apache.directory.api.ldap.model.schema.ObjectClass;
import org.apache.directory.api.ldap.model.schema.SchemaObject;
import org.apache.directory.studio.schemaeditor.Activator;
import org.apache.directory.studio.schemaeditor.PluginConstants;
import org.apache.directory.studio.schemaeditor.PluginUtils;
import org.apache.directory.studio.schemaeditor.controller.SchemaHandler;
import org.apache.directory.studio.schemaeditor.controller.SearchViewController;
import org.apache.directory.studio.schemaeditor.view.ViewUtils;
import org.apache.directory.studio.schemaeditor.view.editors.attributetype.AttributeTypeEditor;
import org.apache.directory.studio.schemaeditor.view.editors.attributetype.AttributeTypeEditorInput;
import org.apache.directory.studio.schemaeditor.view.editors.objectclass.ObjectClassEditor;
import org.apache.directory.studio.schemaeditor.view.editors.objectclass.ObjectClassEditorInput;
import org.apache.directory.studio.schemaeditor.view.search.SearchPage;
import org.apache.directory.studio.schemaeditor.view.search.SearchPage.SearchInEnum;
import org.eclipse.jface.action.Action;
import org.eclipse.jface.dialogs.IDialogSettings;
import org.eclipse.jface.viewers.DecoratingLabelProvider;
import org.eclipse.jface.viewers.DoubleClickEvent;
import org.eclipse.jface.viewers.IDoubleClickListener;
import org.eclipse.jface.viewers.StructuredSelection;
import org.eclipse.jface.viewers.TableViewer;
import org.eclipse.swt.SWT;
import org.eclipse.swt.events.KeyAdapter;
import org.eclipse.swt.events.KeyEvent;
import org.eclipse.swt.events.ModifyEvent;
import org.eclipse.swt.events.ModifyListener;
import org.eclipse.swt.events.SelectionAdapter;
import org.eclipse.swt.events.SelectionEvent;
import org.eclipse.swt.graphics.Point;
import org.eclipse.swt.graphics.Rectangle;
import org.eclipse.swt.layout.GridData;
import org.eclipse.swt.layout.GridLayout;
import org.eclipse.swt.widgets.Button;
import org.eclipse.swt.widgets.Composite;
import org.eclipse.swt.widgets.Label;
import org.eclipse.swt.widgets.Menu;
import org.eclipse.swt.widgets.MenuItem;
import org.eclipse.swt.widgets.Table;
import org.eclipse.swt.widgets.Text;
import org.eclipse.swt.widgets.ToolBar;
import org.eclipse.swt.widgets.ToolItem;
import org.eclipse.ui.IEditorInput;
import org.eclipse.ui.PartInitException;
import org.eclipse.ui.PlatformUI;
import org.eclipse.ui.part.ViewPart;
/**
* This class represents the Search View.
*
* @author <a href="mailto:dev@directory.apache.org">Apache Directory Project</a>
*/
public class SearchView extends ViewPart
{
/** The view's ID */
public static final String ID = PluginConstants.VIEW_SEARCH_VIEW_ID;
/** The current Search String */
private String searchString;
// UI fields
private Text searchField;
private Button searchButton;
private Label searchResultsLabel;
private Table resultsTable;
private TableViewer resultsTableViewer;
private Composite searchFieldComposite;
private Composite searchFieldInnerComposite;
private Label separatorLabel;
/** The parent composite */
private Composite parent;
    /**
     * {@inheritDoc}
     *
     * Builds the view skeleton: an (initially empty) search field area, the results
     * label, a separator and the results table. The search field widgets themselves
     * are created lazily by {@link #showSearchFieldSection()}.
     */
    public void createPartControl( Composite parent )
    {
        // Help Context for Dynamic Help
        PlatformUI.getWorkbench().getHelpSystem().setHelp( parent, PluginConstants.PLUGIN_ID + "." + "search_view" ); //$NON-NLS-1$ //$NON-NLS-2$

        this.parent = parent;
        // Zero out all margins/spacing so the children are flush with the view edges.
        GridLayout gridLayout = new GridLayout();
        gridLayout.horizontalSpacing = 0;
        gridLayout.marginBottom = 0;
        gridLayout.marginHeight = 0;
        gridLayout.marginLeft = 0;
        gridLayout.marginRight = 0;
        gridLayout.marginTop = 0;
        gridLayout.marginWidth = 0;
        gridLayout.verticalSpacing = 0;
        parent.setLayout( gridLayout );

        // Search Field
        searchFieldComposite = new Composite( parent, SWT.NONE );
        gridLayout = new GridLayout();
        gridLayout.horizontalSpacing = 0;
        gridLayout.marginBottom = 0;
        gridLayout.marginHeight = 0;
        gridLayout.marginLeft = 0;
        gridLayout.marginRight = 0;
        gridLayout.marginTop = 0;
        gridLayout.marginWidth = 0;
        gridLayout.verticalSpacing = 0;
        searchFieldComposite.setLayout( gridLayout );
        searchFieldComposite.setLayoutData( new GridData( SWT.FILL, SWT.NONE, true, false ) );

        // This searchFieldCompositeSeparator is used to display correctly the searchFieldComposite,
        // since an empty composite does not display well.
        Label searchFieldCompositeSeparator = new Label( searchFieldComposite, SWT.SEPARATOR | SWT.HORIZONTAL );
        GridData gridData = new GridData( SWT.FILL, SWT.NONE, true, false );
        gridData.heightHint = 1;
        searchFieldCompositeSeparator.setLayoutData( gridData );
        searchFieldCompositeSeparator.setVisible( false );

        // Search Results Label
        searchResultsLabel = new Label( parent, SWT.NONE );
        searchResultsLabel.setLayoutData( new GridData( SWT.FILL, SWT.NONE, true, false ) );

        // Separator Label
        Label separatorLabel2 = new Label( parent, SWT.SEPARATOR | SWT.HORIZONTAL );
        separatorLabel2.setLayoutData( new GridData( SWT.FILL, SWT.NONE, true, false ) );

        // Create the table
        createTableViewer();

        setSearchResultsLabel( null, 0 );

        // The controller wires up listeners between this view and the schema handler.
        new SearchViewController( this );
    }
    /**
     * Create the Search Field Sections.
     *
     * Builds (inside {@code searchFieldComposite}) the "Search:" label, the search text
     * field, the "Search In"/"Scope" drop-down toolbar, the search button and a trailing
     * separator. Called lazily from {@link #showSearchFieldSection()}.
     */
    private void createSearchField()
    {
        // Search Inner Composite
        searchFieldInnerComposite = new Composite( searchFieldComposite, SWT.NONE );
        GridLayout searchFieldInnerCompositeGridLayout = new GridLayout( 4, false );
        searchFieldInnerCompositeGridLayout.horizontalSpacing = 1;
        searchFieldInnerCompositeGridLayout.verticalSpacing = 1;
        searchFieldInnerCompositeGridLayout.marginHeight = 1;
        searchFieldInnerCompositeGridLayout.marginWidth = 2;
        searchFieldInnerComposite.setLayout( searchFieldInnerCompositeGridLayout );
        searchFieldInnerComposite.setLayoutData( new GridData( SWT.FILL, SWT.NONE, true, false ) );

        // Search Label
        Label searchFieldLabel = new Label( searchFieldInnerComposite, SWT.NONE );
        searchFieldLabel.setText( Messages.getString( "SearchView.SearchColon" ) ); //$NON-NLS-1$
        searchFieldLabel.setLayoutData( new GridData( SWT.NONE, SWT.CENTER, false, false ) );

        // Search Text Field (pre-filled with the previous search string, if any)
        searchField = new Text( searchFieldInnerComposite, SWT.BORDER | SWT.SEARCH | SWT.CANCEL );
        if ( searchString != null )
        {
            searchField.setText( searchString );
        }
        searchField.setLayoutData( new GridData( SWT.FILL, SWT.CENTER, true, false ) );
        searchField.addModifyListener( new ModifyListener()
        {
            public void modifyText( ModifyEvent e )
            {
                validateSearchField();
            }
        } );
        // Arrow-down moves focus to the results; Enter triggers the search.
        searchField.addKeyListener( new KeyAdapter()
        {
            public void keyReleased( KeyEvent e )
            {
                if ( e.keyCode == SWT.ARROW_DOWN )
                {
                    resultsTable.setFocus();
                }
                else if ( ( e.keyCode == Action.findKeyCode( "RETURN" ) ) || ( e.keyCode == SWT.KEYPAD_CR ) ) //$NON-NLS-1$
                {
                    search();
                }
            }
        } );

        // Search Toolbar
        final ToolBar searchToolBar = new ToolBar( searchFieldInnerComposite, SWT.HORIZONTAL | SWT.FLAT );

        // Creating the Search In ToolItem
        final ToolItem searchInToolItem = new ToolItem( searchToolBar, SWT.DROP_DOWN );
        searchInToolItem.setText( Messages.getString( "SearchView.SearchIn" ) ); //$NON-NLS-1$

        // Adding the action to display the Menu when the item is clicked
        searchInToolItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent event )
            {
                // Pop the menu up just below the tool item.
                Rectangle rect = searchInToolItem.getBounds();
                Point pt = new Point( rect.x, rect.y + rect.height );
                pt = searchToolBar.toDisplay( pt );

                Menu menu = createSearchInMenu();
                menu.setLocation( pt.x, pt.y );
                menu.setVisible( true );
            }
        } );

        new ToolItem( searchToolBar, SWT.SEPARATOR );

        final ToolItem scopeToolItem = new ToolItem( searchToolBar, SWT.DROP_DOWN );
        scopeToolItem.setText( Messages.getString( "SearchView.Scope" ) ); //$NON-NLS-1$

        // Adding the action to display the Menu when the item is clicked
        scopeToolItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent event )
            {
                // Pop the menu up just below the tool item.
                Rectangle rect = scopeToolItem.getBounds();
                Point pt = new Point( rect.x, rect.y + rect.height );
                pt = searchToolBar.toDisplay( pt );

                Menu menu = createScopeMenu();
                menu.setLocation( pt.x, pt.y );
                menu.setVisible( true );
            }
        } );
        searchToolBar.setLayoutData( new GridData( SWT.NONE, SWT.CENTER, false, false ) );

        // Search Button (disabled until the field contains a valid search string)
        searchButton = new Button( searchFieldInnerComposite, SWT.PUSH | SWT.DOWN );
        searchButton.setEnabled( false );
        searchButton.setImage( Activator.getDefault().getImage( PluginConstants.IMG_SEARCH ) );
        searchButton.setToolTipText( Messages.getString( "SearchView.Search" ) ); //$NON-NLS-1$
        searchButton.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                search();
            }
        } );
        searchButton.setLayoutData( new GridData( SWT.NONE, SWT.CENTER, false, false ) );

        // Separator Label
        separatorLabel = new Label( searchFieldComposite, SWT.SEPARATOR | SWT.HORIZONTAL );
        separatorLabel.setLayoutData( new GridData( SWT.FILL, SWT.NONE, true, false ) );
    }
    /**
     * Creates the Search In Menu.
     *
     * Each check item persists its state straight into the plugin's dialog settings
     * when toggled; the items' initial selection is read back from those settings
     * (a few options default to checked when no setting exists yet).
     *
     * @return
     *      the Search In menu
     */
    public Menu createSearchInMenu()
    {
        final IDialogSettings settings = Activator.getDefault().getDialogSettings();

        // Creating the associated Menu
        Menu searchInMenu = new Menu( PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(), SWT.POP_UP );

        // Filling the menu
        // Aliases
        final MenuItem aliasesMenuItem = new MenuItem( searchInMenu, SWT.CHECK );
        aliasesMenuItem.setText( Messages.getString( "SearchView.Aliases" ) ); //$NON-NLS-1$
        aliasesMenuItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                settings.put( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_ALIASES, aliasesMenuItem.getSelection() );
            }
        } );
        // OID
        final MenuItem oidMenuItem = new MenuItem( searchInMenu, SWT.CHECK );
        oidMenuItem.setText( Messages.getString( "SearchView.OID" ) ); //$NON-NLS-1$
        oidMenuItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                settings.put( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_OID, oidMenuItem.getSelection() );
            }
        } );
        // Description
        final MenuItem descriptionMenuItem = new MenuItem( searchInMenu, SWT.CHECK );
        descriptionMenuItem.setText( Messages.getString( "SearchView.Description" ) ); //$NON-NLS-1$
        descriptionMenuItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                settings.put( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_DESCRIPTION, descriptionMenuItem
                    .getSelection() );
            }
        } );
        // Separator
        new MenuItem( searchInMenu, SWT.SEPARATOR );
        // Superior
        final MenuItem superiorMenuItem = new MenuItem( searchInMenu, SWT.CHECK );
        superiorMenuItem.setText( Messages.getString( "SearchView.Superior" ) ); //$NON-NLS-1$
        superiorMenuItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                settings.put( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_SUPERIOR, superiorMenuItem.getSelection() );
            }
        } );
        // Syntax
        final MenuItem syntaxMenuItem = new MenuItem( searchInMenu, SWT.CHECK );
        syntaxMenuItem.setText( Messages.getString( "SearchView.Syntax" ) ); //$NON-NLS-1$
        syntaxMenuItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                settings.put( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_SYNTAX, syntaxMenuItem.getSelection() );
            }
        } );
        // Matching Rules
        final MenuItem matchingRulesMenuItem = new MenuItem( searchInMenu, SWT.CHECK );
        matchingRulesMenuItem.setText( Messages.getString( "SearchView.MatchingRules" ) ); //$NON-NLS-1$
        matchingRulesMenuItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                settings.put( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_MATCHING_RULES, matchingRulesMenuItem
                    .getSelection() );
            }
        } );
        // Separator
        new MenuItem( searchInMenu, SWT.SEPARATOR );
        // Superiors
        final MenuItem superiorsMenuItem = new MenuItem( searchInMenu, SWT.CHECK );
        superiorsMenuItem.setText( Messages.getString( "SearchView.Superiors" ) ); //$NON-NLS-1$
        superiorsMenuItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                settings.put( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_SUPERIORS, superiorsMenuItem.getSelection() );
            }
        } );
        // Mandatory Attributes
        final MenuItem mandatoryAttributesMenuItem = new MenuItem( searchInMenu, SWT.CHECK );
        mandatoryAttributesMenuItem.setText( Messages.getString( "SearchView.MandatoryAttributes" ) ); //$NON-NLS-1$
        mandatoryAttributesMenuItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                settings.put( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_MANDATORY_ATTRIBUTES,
                    mandatoryAttributesMenuItem.getSelection() );
            }
        } );
        // Optional Attributes
        final MenuItem optionalAttributesMenuItem = new MenuItem( searchInMenu, SWT.CHECK );
        optionalAttributesMenuItem.setText( Messages.getString( "SearchView.OptionalAttributes" ) ); //$NON-NLS-1$
        optionalAttributesMenuItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                settings.put( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_OPTIONAL_ATTRIBUTES,
                    optionalAttributesMenuItem.getSelection() );
            }
        } );

        // Initialize the selections from the stored settings.
        // Aliases, OID and Description default to checked when no setting exists yet.
        if ( settings.get( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_ALIASES ) == null )
        {
            aliasesMenuItem.setSelection( true );
        }
        else
        {
            aliasesMenuItem.setSelection( settings.getBoolean( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_ALIASES ) );
        }
        if ( settings.get( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_OID ) == null )
        {
            oidMenuItem.setSelection( true );
        }
        else
        {
            oidMenuItem.setSelection( settings.getBoolean( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_OID ) );
        }
        if ( settings.get( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_DESCRIPTION ) == null )
        {
            descriptionMenuItem.setSelection( true );
        }
        else
        {
            descriptionMenuItem.setSelection( settings
                .getBoolean( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_DESCRIPTION ) );
        }
        // The remaining options default to unchecked (getBoolean returns false for a missing key).
        superiorMenuItem.setSelection( settings.getBoolean( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_SUPERIOR ) );
        syntaxMenuItem.setSelection( settings.getBoolean( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_SYNTAX ) );
        matchingRulesMenuItem.setSelection( settings
            .getBoolean( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_MATCHING_RULES ) );
        superiorsMenuItem.setSelection( settings.getBoolean( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_SUPERIORS ) );
        mandatoryAttributesMenuItem.setSelection( settings
            .getBoolean( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_MANDATORY_ATTRIBUTES ) );
        optionalAttributesMenuItem.setSelection( settings
            .getBoolean( PluginConstants.PREFS_SEARCH_PAGE_SEARCH_IN_OPTIONAL_ATTRIBUTES ) );

        return searchInMenu;
    }
    /**
     * Creates the Scope Menu.
     *
     * Offers three mutually exclusive radio options (attribute types and object classes /
     * attribute types only / object classes only). Selecting an item persists the scope
     * into the plugin's dialog settings; the initial selection is read back from those
     * settings, defaulting to "types and classes" when no setting exists yet.
     *
     * @return
     *      the Scope menu
     */
    public Menu createScopeMenu()
    {
        final IDialogSettings settings = Activator.getDefault().getDialogSettings();

        // Creating the associated Menu
        Menu scopeMenu = new Menu( PlatformUI.getWorkbench().getActiveWorkbenchWindow().getShell(), SWT.POP_UP );

        // Filling the menu
        // Attribute Types And Object Classes
        final MenuItem attributeTypesAndObjectClassesMenuItem = new MenuItem( scopeMenu, SWT.RADIO );
        attributeTypesAndObjectClassesMenuItem.setText( Messages.getString( "SearchView.TypesAndClasses" ) ); //$NON-NLS-1$
        attributeTypesAndObjectClassesMenuItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                settings.put( PluginConstants.PREFS_SEARCH_PAGE_SCOPE,
                    PluginConstants.PREFS_SEARCH_PAGE_SCOPE_AT_AND_OC );
            }
        } );
        // Attributes Type Only
        final MenuItem attributesTypesOnlyMenuItem = new MenuItem( scopeMenu, SWT.RADIO );
        attributesTypesOnlyMenuItem.setText( Messages.getString( "SearchView.TypesOnly" ) ); //$NON-NLS-1$
        attributesTypesOnlyMenuItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                settings.put( PluginConstants.PREFS_SEARCH_PAGE_SCOPE, PluginConstants.PREFS_SEARCH_PAGE_SCOPE_AT_ONLY );
            }
        } );
        // Object Classes Only
        final MenuItem objectClassesMenuItem = new MenuItem( scopeMenu, SWT.RADIO );
        objectClassesMenuItem.setText( Messages.getString( "SearchView.ClassesOnly" ) ); //$NON-NLS-1$
        objectClassesMenuItem.addSelectionListener( new SelectionAdapter()
        {
            public void widgetSelected( SelectionEvent e )
            {
                settings.put( PluginConstants.PREFS_SEARCH_PAGE_SCOPE, PluginConstants.PREFS_SEARCH_PAGE_SCOPE_OC_ONLY );
            }
        } );

        // Initialize the radio selection from the stored setting.
        if ( settings.get( PluginConstants.PREFS_SEARCH_PAGE_SCOPE ) == null )
        {
            attributeTypesAndObjectClassesMenuItem.setSelection( true );
        }
        else
        {
            switch ( settings.getInt( PluginConstants.PREFS_SEARCH_PAGE_SCOPE ) )
            {
                case PluginConstants.PREFS_SEARCH_PAGE_SCOPE_AT_AND_OC:
                    attributeTypesAndObjectClassesMenuItem.setSelection( true );
                    break;
                case PluginConstants.PREFS_SEARCH_PAGE_SCOPE_AT_ONLY:
                    attributesTypesOnlyMenuItem.setSelection( true );
                    break;
                case PluginConstants.PREFS_SEARCH_PAGE_SCOPE_OC_ONLY:
                    objectClassesMenuItem.setSelection( true );
                    break;
            }
        }

        return scopeMenu;
    }
    /**
     * Creates the TableViewer.
     *
     * The results table opens the editor of the selected schema element on Enter
     * or on double-click (see {@link #openEditor()}).
     */
    private void createTableViewer()
    {
        // Creating the TableViewer
        resultsTable = new Table( parent, SWT.SINGLE | SWT.H_SCROLL | SWT.V_SCROLL | SWT.FULL_SELECTION
            | SWT.HIDE_SELECTION );
        GridData gridData = new GridData( SWT.FILL, SWT.FILL, true, true );
        resultsTable.setLayoutData( gridData );
        resultsTable.setLinesVisible( true );

        // Creating the TableViewer
        resultsTableViewer = new TableViewer( resultsTable );
        // Decorating label provider so workbench decorators (e.g. error markers) are applied.
        resultsTableViewer.setLabelProvider( new DecoratingLabelProvider( new SearchViewLabelProvider(), Activator
            .getDefault().getWorkbench().getDecoratorManager().getLabelDecorator() ) );
        resultsTableViewer.setContentProvider( new SearchViewContentProvider() );

        // Adding listeners
        resultsTable.addKeyListener( new KeyAdapter()
        {
            public void keyPressed( KeyEvent e )
            {
                if ( ( e.keyCode == Action.findKeyCode( "RETURN" ) ) || ( e.keyCode == SWT.KEYPAD_CR ) ) // return key //$NON-NLS-1$
                {
                    openEditor();
                }
            }
        } );
        resultsTableViewer.addDoubleClickListener( new IDoubleClickListener()
        {
            public void doubleClick( DoubleClickEvent event )
            {
                openEditor();
            }
        } );
    }
/**
 * Open the editor associated with the current selection in the table.
 * <p>
 * Does nothing when no schema handler is available or the selection is
 * empty. Attribute types open in the {@link AttributeTypeEditor}, object
 * classes in the {@link ObjectClassEditor}; any other element type is
 * ignored. Failures while opening the editor are logged and reported to
 * the user in an error dialog.
 */
private void openEditor()
{
    if ( Activator.getDefault().getSchemaHandler() != null )
    {
        StructuredSelection selection = ( StructuredSelection ) resultsTableViewer.getSelection();
        if ( !selection.isEmpty() )
        {
            Object item = selection.getFirstElement();
            IEditorInput input = null;
            String editorId = null;

            // Pick editor input and id from the selected element's type.
            if ( item instanceof AttributeType )
            {
                input = new AttributeTypeEditorInput( ( AttributeType ) item );
                editorId = AttributeTypeEditor.ID;
            }
            else if ( item instanceof ObjectClass )
            {
                input = new ObjectClassEditorInput( ( ObjectClass ) item );
                editorId = ObjectClassEditor.ID;
            }

            // Let's open the editor (input stays null for unsupported types)
            if ( input != null )
            {
                try
                {
                    PlatformUI.getWorkbench().getActiveWorkbenchWindow().getActivePage().openEditor( input,
                        editorId );
                }
                catch ( PartInitException exception )
                {
                    // Log and surface the failure; do not rethrow from a UI callback.
                    PluginUtils.logError( Messages.getString( "SearchView.ErrorOpeningEditor" ), exception ); //$NON-NLS-1$
                    ViewUtils
                        .displayErrorMessageDialog(
                            Messages.getString( "SearchView.Error" ), Messages.getString( "SearchView.ErrorOpeningEditor" ) ); //$NON-NLS-1$ //$NON-NLS-2$
                }
            }
        }
    }
}
/**
 * {@inheritDoc}
 * <p>
 * Gives focus to the search field when it exists and is not disposed,
 * otherwise to the results table. Guards the table the same way the field
 * is guarded, so a focus request arriving before the table is created or
 * after it is disposed no longer throws.
 */
public void setFocus()
{
    if ( searchField != null && !searchField.isDisposed() )
    {
        searchField.setFocus();
    }
    else if ( resultsTable != null && !resultsTable.isDisposed() )
    {
        // Previously unguarded: a null or disposed table caused an
        // NPE / widget-disposed exception during view creation or teardown.
        resultsTable.setFocus();
    }
}
/**
 * Shows the Search Field Section.
 * <p>
 * Re-creates the search field widgets, re-lays out the parent composite,
 * gives the field focus, and synchronizes the search button's enablement
 * with the (possibly empty) field content.
 */
public void showSearchFieldSection()
{
    createSearchField();
    parent.layout( true, true );
    searchField.setFocus();
    validateSearchField();
}
/**
 * Hides the Search Field Section.
 * <p>
 * Disposes the search-field composite and its separator label, nulling the
 * references so that a later {@link #showSearchFieldSection()} re-creates
 * them, then re-lays out the parent and moves focus to the results table.
 */
public void hideSearchFieldSection()
{
    if ( searchFieldInnerComposite != null )
    {
        searchFieldInnerComposite.dispose();
        // Null the reference so the next show re-creates the widgets.
        searchFieldInnerComposite = null;
    }

    if ( separatorLabel != null )
    {
        separatorLabel.dispose();
        separatorLabel = null;
    }

    parent.layout( true, true );

    resultsTable.setFocus();
}
/**
 * Enables the search button only while the search field contains text.
 */
private void validateSearchField()
{
    boolean hasText = !searchField.getText().isEmpty();
    searchButton.setEnabled( hasText );
}
/**
 * Sets the Search Input.
 * <p>
 * Records the search string, persists it to the search history and saves
 * the 'search in' settings, mirrors the string into the search field when
 * that widget is visible, then runs the search and publishes the results
 * to the overview label and the table viewer.
 *
 * @param searchString
 *      the search String
 * @param searchIn
 *      the properties to search in
 * @param scope
 *      the scope (attribute types and/or object classes)
 */
public void setSearchInput( String searchString, SearchInEnum[] searchIn, int scope )
{
    this.searchString = searchString;

    // Saving search String and Search Scope to dialog settings
    SearchPage.addSearchStringHistory( searchString );
    SearchPage.saveSearchScope( Arrays.asList( searchIn ) );

    // Keep the (optional) search field widget in sync with the programmatic input.
    if ( ( searchField != null ) && ( !searchField.isDisposed() ) )
    {
        searchField.setText( searchString );
        validateSearchField();
    }

    List<SchemaObject> results = search( searchString, searchIn, scope );
    setSearchResultsLabel( searchString, results.size() );
    resultsTableViewer.setInput( results );
}
/**
 * Searches the objects corresponding to the search parameters.
 * <p>
 * The search string is a wildcard expression: '*' matches any run of
 * non-whitespace characters and '?' matches exactly one non-whitespace
 * character; every other character is matched literally (case-insensitive).
 *
 * @param searchString
 *      the search String (may be null, in which case no results are returned)
 * @param searchIn
 *      the properties to search in
 * @param scope
 *      the scope (attribute types and/or object classes)
 * @return
 *      the matching schema objects, never null
 */
private List<SchemaObject> search( String searchString, SearchInEnum[] searchIn, int scope )
{
    List<SchemaObject> searchResults = new ArrayList<SchemaObject>();

    if ( searchString != null )
    {
        Pattern pattern = Pattern.compile( computeSearchPattern( searchString ), Pattern.CASE_INSENSITIVE );

        SchemaHandler schemaHandler = Activator.getDefault().getSchemaHandler();
        if ( schemaHandler != null )
        {
            List<SearchInEnum> searchScope = new ArrayList<SearchInEnum>( Arrays.asList( searchIn ) );

            if ( ( scope == PluginConstants.PREFS_SEARCH_PAGE_SCOPE_AT_AND_OC )
                || ( scope == PluginConstants.PREFS_SEARCH_PAGE_SCOPE_AT_ONLY ) )
            {
                searchAttributeTypes( schemaHandler.getAttributeTypes(), pattern, searchScope, searchResults );
            }

            if ( ( scope == PluginConstants.PREFS_SEARCH_PAGE_SCOPE_AT_AND_OC )
                || ( scope == PluginConstants.PREFS_SEARCH_PAGE_SCOPE_OC_ONLY ) )
            {
                searchObjectClasses( schemaHandler.getObjectClasses(), pattern, searchScope, searchResults );
            }
        }
    }

    return searchResults;
}


/**
 * Converts the user's wildcard search string into a regular expression.
 * <p>
 * '*' becomes "[\S]*" and '?' becomes "[\S]"; every other character is
 * quoted so that regex metacharacters typed by the user ('(', '[', '+',
 * etc.) are matched literally. The previous implementation passed such
 * characters straight to Pattern.compile, which threw a
 * PatternSyntaxException and aborted the search.
 *
 * @param searchString the raw search string (not null)
 * @return the equivalent regular expression
 */
private String computeSearchPattern( String searchString )
{
    StringBuilder sb = new StringBuilder();

    for ( int i = 0; i < searchString.length(); i++ )
    {
        char c = searchString.charAt( i );
        if ( c == '*' )
        {
            sb.append( "[\\S]*" ); //$NON-NLS-1$
        }
        else if ( c == '?' )
        {
            sb.append( "[\\S]" ); //$NON-NLS-1$
        }
        else
        {
            // Quote everything else so it is matched literally.
            sb.append( Pattern.quote( String.valueOf( c ) ) );
        }
    }

    return sb.toString();
}


/**
 * Adds to 'searchResults' every attribute type one of whose selected
 * properties matches the pattern. Each attribute type is added at most once.
 */
private void searchAttributeTypes( List<AttributeType> attributeTypes, Pattern pattern,
    List<SearchInEnum> searchScope, List<SchemaObject> searchResults )
{
    for ( AttributeType at : attributeTypes )
    {
        // Aliases
        if ( searchScope.contains( SearchInEnum.ALIASES ) && checkList( pattern, at.getNames() ) )
        {
            searchResults.add( at );
            continue;
        }
        // OID
        if ( searchScope.contains( SearchInEnum.OID ) && checkString( pattern, at.getOid() ) )
        {
            searchResults.add( at );
            continue;
        }
        // Description
        if ( searchScope.contains( SearchInEnum.DESCRIPTION ) && checkString( pattern, at.getDescription() ) )
        {
            searchResults.add( at );
            continue;
        }
        // Superior
        if ( searchScope.contains( SearchInEnum.SUPERIOR ) && checkString( pattern, at.getSuperiorOid() ) )
        {
            searchResults.add( at );
            continue;
        }
        // Syntax
        if ( searchScope.contains( SearchInEnum.SYNTAX ) && checkString( pattern, at.getSyntaxOid() ) )
        {
            searchResults.add( at );
            continue;
        }
        // Matching Rules (equality, ordering, substring)
        if ( searchScope.contains( SearchInEnum.MATCHING_RULES )
            && ( checkString( pattern, at.getEqualityOid() ) || checkString( pattern, at.getOrderingOid() )
                || checkString( pattern, at.getSubstringOid() ) ) )
        {
            searchResults.add( at );
            continue;
        }
    }
}


/**
 * Adds to 'searchResults' every object class one of whose selected
 * properties matches the pattern. Each object class is added at most once.
 */
private void searchObjectClasses( List<ObjectClass> objectClasses, Pattern pattern,
    List<SearchInEnum> searchScope, List<SchemaObject> searchResults )
{
    for ( ObjectClass oc : objectClasses )
    {
        // Aliases
        if ( searchScope.contains( SearchInEnum.ALIASES ) && checkList( pattern, oc.getNames() ) )
        {
            searchResults.add( oc );
            continue;
        }
        // OID
        if ( searchScope.contains( SearchInEnum.OID ) && checkString( pattern, oc.getOid() ) )
        {
            searchResults.add( oc );
            continue;
        }
        // Description
        if ( searchScope.contains( SearchInEnum.DESCRIPTION ) && checkString( pattern, oc.getDescription() ) )
        {
            searchResults.add( oc );
            continue;
        }
        // Superiors
        if ( searchScope.contains( SearchInEnum.SUPERIORS ) && checkList( pattern, oc.getSuperiorOids() ) )
        {
            searchResults.add( oc );
            continue;
        }
        // Mandatory Attributes
        if ( searchScope.contains( SearchInEnum.MANDATORY_ATTRIBUTES )
            && checkList( pattern, oc.getMustAttributeTypeOids() ) )
        {
            searchResults.add( oc );
            continue;
        }
        // Optional Attributes
        if ( searchScope.contains( SearchInEnum.OPTIONAL_ATTRIBUTES )
            && checkList( pattern, oc.getMayAttributeTypeOids() ) )
        {
            searchResults.add( oc );
            continue;
        }
    }
}
/**
 * Checks a list of strings against the given pattern.
 *
 * @param pattern
 *      the Regex pattern
 * @param list
 *      the candidate strings (may be null)
 * @return
 *      true if the pattern matches at least one element, false otherwise.
 */
private boolean checkList( Pattern pattern, List<String> list )
{
    if ( list == null )
    {
        return false;
    }

    for ( String candidate : list )
    {
        if ( pattern.matcher( candidate ).matches() )
        {
            return true;
        }
    }

    return false;
}
/**
 * Checks a single string against the given pattern.
 *
 * @param pattern
 *      the Regex pattern
 * @param string
 *      the candidate string (may be null)
 * @return
 *      true if the string is non-null and fully matches the pattern.
 */
private boolean checkString( Pattern pattern, String string )
{
    return ( string != null ) && pattern.matcher( string ).matches();
}
/**
 * Launches the search from the search fields views.
 * <p>
 * Uses the current text of the search field together with the persisted
 * 'search in' and scope settings.
 */
private void search()
{
    List<SearchInEnum> searchIn = SearchPage.loadSearchIn();
    SearchInEnum[] searchInArray = searchIn.toArray( new SearchInEnum[0] );
    setSearchInput( searchField.getText(), searchInArray, SearchPage.loadScope() );
}
/**
 * Refresh the overview label with the number of results.
 *
 * @param searchString
 *      the search String, or null if no search has been performed yet
 * @param resultsCount
 *      the number of results
 */
public void setSearchResultsLabel( String searchString, int resultsCount )
{
    // StringBuilder instead of StringBuffer: built and consumed on the UI
    // thread only, so the synchronized variant buys nothing.
    StringBuilder sb = new StringBuilder();

    if ( searchString == null )
    {
        sb.append( Messages.getString( "SearchView.NoSearch" ) ); //$NON-NLS-1$
    }
    else
    {
        // Search String
        sb.append( "'" + searchString + "'" ); //$NON-NLS-1$ //$NON-NLS-2$
        sb.append( " - " ); //$NON-NLS-1$

        // Search results count
        sb.append( resultsCount );
        sb.append( " " ); //$NON-NLS-1$
        // Plural for every count except exactly one. The previous '> 1'
        // test labeled zero results with the singular form ("0 match").
        if ( resultsCount != 1 )
        {
            sb.append( Messages.getString( "SearchView.Matches" ) ); //$NON-NLS-1$
        }
        else
        {
            sb.append( Messages.getString( "SearchView.Match" ) ); //$NON-NLS-1$
        }
        sb.append( Messages.getString( "SearchView.InWorkspace" ) ); //$NON-NLS-1$
    }

    searchResultsLabel.setText( sb.toString() );
}
/**
 * Runs the current search again.
 * <p>
 * Re-executes the last search (if any) with the currently persisted
 * 'search in' and scope settings.
 */
public void runCurrentSearchAgain()
{
    if ( searchString == null )
    {
        return;
    }

    SearchInEnum[] searchIn = SearchPage.loadSearchIn().toArray( new SearchInEnum[0] );
    setSearchInput( searchString, searchIn, SearchPage.loadScope() );
}
/**
 * Gets the Search String.
 * <p>
 * This is the string recorded by the last call to
 * {@code setSearchInput(...)}.
 *
 * @return
 *      the Search String or null if no Search String is set.
 */
public String getSearchString()
{
    return searchString;
}
/**
 * Refreshes the view.
 * <p>
 * Asks the table viewer to re-read labels and content for its current
 * input.
 */
public void refresh()
{
    resultsTableViewer.refresh();
}
}
|
googleapis/google-cloud-java | 36,941 | java-biglake/proto-google-cloud-biglake-v1/src/main/java/com/google/cloud/bigquery/biglake/v1/ListDatabasesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/biglake/v1/metastore.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.biglake.v1;
/**
*
*
* <pre>
* Response message for the ListDatabases method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1.ListDatabasesResponse}
*/
public final class ListDatabasesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.biglake.v1.ListDatabasesResponse)
ListDatabasesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListDatabasesResponse.newBuilder() to construct.
private ListDatabasesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListDatabasesResponse() {
databases_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListDatabasesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.biglake.v1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1_ListDatabasesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.biglake.v1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1_ListDatabasesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse.class,
com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse.Builder.class);
}
public static final int DATABASES_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.bigquery.biglake.v1.Database> databases_;
/**
*
*
* <pre>
* The databases from the specified catalog.
* </pre>
*
* <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.bigquery.biglake.v1.Database> getDatabasesList() {
return databases_;
}
/**
*
*
* <pre>
* The databases from the specified catalog.
* </pre>
*
* <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder>
getDatabasesOrBuilderList() {
return databases_;
}
/**
*
*
* <pre>
* The databases from the specified catalog.
* </pre>
*
* <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
*/
@java.lang.Override
public int getDatabasesCount() {
return databases_.size();
}
/**
*
*
* <pre>
* The databases from the specified catalog.
* </pre>
*
* <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
*/
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.Database getDatabases(int index) {
return databases_.get(index);
}
/**
*
*
* <pre>
* The databases from the specified catalog.
* </pre>
*
* <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
*/
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder getDatabasesOrBuilder(int index) {
return databases_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
// NOTE(review): protoc-generated (see file header); manual edits are lost on regeneration.
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Field still holds the parsed ByteString: decode once and cache the
    // resulting String back into the field.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
// Serializes this message to the wire. Generated by protoc - do not hand-edit.
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1: each Database element as a length-delimited message.
  for (int i = 0; i < databases_.size(); i++) {
    output.writeMessage(1, databases_.get(i));
  }
  // Field 2: next_page_token, skipped when empty.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  // Round-trip any fields that were unknown at parse time.
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes) the serialized byte size; mirrors writeTo field-for-field.
@java.lang.Override
public int getSerializedSize() {
  // memoizedSize of -1 means "not yet computed".
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < databases_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, databases_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over all fields, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // Objects of other types are delegated to the superclass comparison.
  if (!(obj instanceof com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse other =
      (com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse) obj;

  if (!getDatabasesList().equals(other.getDatabasesList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Hash over the same fields equals() compares; consistent with equals().
@java.lang.Override
public int hashCode() {
  // Memoized; 0 means "not yet computed".
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // The repeated field only contributes when non-empty, so the hash of a
  // default instance matches regardless of list representation.
  if (getDatabasesCount() > 0) {
    hash = (37 * hash) + DATABASES_FIELD_NUMBER;
    hash = (53 * hash) + getDatabasesList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for the ListDatabases method.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.biglake.v1.ListDatabasesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.biglake.v1.ListDatabasesResponse)
com.google.cloud.bigquery.biglake.v1.ListDatabasesResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.biglake.v1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1_ListDatabasesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.biglake.v1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1_ListDatabasesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse.class,
com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse.Builder.class);
}
// Construct using com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (databasesBuilder_ == null) {
databases_ = java.util.Collections.emptyList();
} else {
databases_ = null;
databasesBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.biglake.v1.MetastoreProto
.internal_static_google_cloud_bigquery_biglake_v1_ListDatabasesResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse getDefaultInstanceForType() {
return com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse build() {
com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse buildPartial() {
com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse result =
new com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse result) {
if (databasesBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
databases_ = java.util.Collections.unmodifiableList(databases_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.databases_ = databases_;
} else {
result.databases_ = databasesBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse) {
return mergeFrom((com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
/**
 * Merges {@code other} into this builder: its databases are appended to
 * ours, and a non-empty next_page_token in {@code other} replaces the
 * current value. Generated by protoc - do not hand-edit.
 */
public Builder mergeFrom(com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse other) {
  if (other == com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse.getDefaultInstance())
    return this;
  if (databasesBuilder_ == null) {
    // Plain-list mode: if our list is empty, share other's list directly
    // (and clear the ownership bit); otherwise copy-on-write and append.
    if (!other.databases_.isEmpty()) {
      if (databases_.isEmpty()) {
        databases_ = other.databases_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureDatabasesIsMutable();
        databases_.addAll(other.databases_);
      }
      onChanged();
    }
  } else {
    // Field-builder mode: an empty builder is discarded and replaced by
    // other's list; a non-empty builder just receives the extra messages.
    if (!other.databases_.isEmpty()) {
      if (databasesBuilder_.isEmpty()) {
        databasesBuilder_.dispose();
        databasesBuilder_ = null;
        databases_ = other.databases_;
        bitField0_ = (bitField0_ & ~0x00000001);
        databasesBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getDatabasesFieldBuilder()
                : null;
      } else {
        databasesBuilder_.addAllMessages(other.databases_);
      }
    }
  }
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Parses fields from the wire into this builder, dispatching on the tag
// (field number << 3 | wire type). Generated by protoc - do not hand-edit.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // End of stream.
          done = true;
          break;
        case 10:
          {
            // Field 1 (databases): length-delimited Database message.
            com.google.cloud.bigquery.biglake.v1.Database m =
                input.readMessage(
                    com.google.cloud.bigquery.biglake.v1.Database.parser(), extensionRegistry);
            if (databasesBuilder_ == null) {
              ensureDatabasesIsMutable();
              databases_.add(m);
            } else {
              databasesBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18:
          {
            // Field 2 (next_page_token): UTF-8 validated string.
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            // Unknown fields are preserved; false return means endgroup.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.bigquery.biglake.v1.Database> databases_ =
java.util.Collections.emptyList();
// Copies databases_ into a fresh ArrayList the first time the builder
// mutates it; bit 0x00000001 of bitField0_ records that the list is now
// owned and mutable (clear = possibly shared/immutable).
private void ensureDatabasesIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    databases_ =
        new java.util.ArrayList<com.google.cloud.bigquery.biglake.v1.Database>(databases_);
    bitField0_ |= 0x00000001;
  }
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.bigquery.biglake.v1.Database,
com.google.cloud.bigquery.biglake.v1.Database.Builder,
com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder>
databasesBuilder_;
/**
*
*
* <pre>
* The databases from the specified catalog.
* </pre>
*
* <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
*/
public java.util.List<com.google.cloud.bigquery.biglake.v1.Database> getDatabasesList() {
if (databasesBuilder_ == null) {
return java.util.Collections.unmodifiableList(databases_);
} else {
return databasesBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The databases from the specified catalog.
* </pre>
*
* <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
*/
public int getDatabasesCount() {
if (databasesBuilder_ == null) {
return databases_.size();
} else {
return databasesBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The databases from the specified catalog.
* </pre>
*
* <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
*/
public com.google.cloud.bigquery.biglake.v1.Database getDatabases(int index) {
if (databasesBuilder_ == null) {
return databases_.get(index);
} else {
return databasesBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The databases from the specified catalog.
* </pre>
*
* <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
*/
public Builder setDatabases(int index, com.google.cloud.bigquery.biglake.v1.Database value) {
if (databasesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatabasesIsMutable();
databases_.set(index, value);
onChanged();
} else {
databasesBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The databases from the specified catalog.
* </pre>
*
* <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
*/
public Builder setDatabases(
int index, com.google.cloud.bigquery.biglake.v1.Database.Builder builderForValue) {
if (databasesBuilder_ == null) {
ensureDatabasesIsMutable();
databases_.set(index, builderForValue.build());
onChanged();
} else {
databasesBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The databases from the specified catalog.
* </pre>
*
* <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
*/
public Builder addDatabases(com.google.cloud.bigquery.biglake.v1.Database value) {
if (databasesBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureDatabasesIsMutable();
databases_.add(value);
onChanged();
} else {
databasesBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The databases from the specified catalog.
* </pre>
*
* <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
*/
// NOTE(review): protoc-generated code ("DO NOT EDIT") — comments only; change behavior by
// editing the .proto and regenerating.
// All mutators below follow the generated repeated-field pattern: while databasesBuilder_ is
// null the local databases_ list is mutated directly (made mutable on first write via
// ensureDatabasesIsMutable, with onChanged() notifying the parent); once a field builder
// exists, every mutation is delegated to it instead.
public Builder addDatabases(int index, com.google.cloud.bigquery.biglake.v1.Database value) {
  if (databasesBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureDatabasesIsMutable();
    databases_.add(index, value);
    onChanged();
  } else {
    databasesBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The databases from the specified catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
 */
public Builder addDatabases(
    com.google.cloud.bigquery.biglake.v1.Database.Builder builderForValue) {
  if (databasesBuilder_ == null) {
    ensureDatabasesIsMutable();
    databases_.add(builderForValue.build());
    onChanged();
  } else {
    databasesBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The databases from the specified catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
 */
public Builder addDatabases(
    int index, com.google.cloud.bigquery.biglake.v1.Database.Builder builderForValue) {
  if (databasesBuilder_ == null) {
    ensureDatabasesIsMutable();
    databases_.add(index, builderForValue.build());
    onChanged();
  } else {
    databasesBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The databases from the specified catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
 */
public Builder addAllDatabases(
    java.lang.Iterable<? extends com.google.cloud.bigquery.biglake.v1.Database> values) {
  if (databasesBuilder_ == null) {
    ensureDatabasesIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, databases_);
    onChanged();
  } else {
    databasesBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The databases from the specified catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
 */
public Builder clearDatabases() {
  if (databasesBuilder_ == null) {
    databases_ = java.util.Collections.emptyList();
    // Clear the has-list bit so buildPartial treats the field as untouched/shared again.
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  } else {
    databasesBuilder_.clear();
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The databases from the specified catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
 */
public Builder removeDatabases(int index) {
  if (databasesBuilder_ == null) {
    ensureDatabasesIsMutable();
    databases_.remove(index);
    onChanged();
  } else {
    databasesBuilder_.remove(index);
  }
  return this;
}
/**
 *
 *
 * <pre>
 * The databases from the specified catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
 */
// Generated builder-view accessors. Calling any *Builder method forces creation of the
// RepeatedFieldBuilderV3 (see getDatabasesFieldBuilder below), after which the plain list
// path is abandoned for the lifetime of this Builder.
public com.google.cloud.bigquery.biglake.v1.Database.Builder getDatabasesBuilder(int index) {
  return getDatabasesFieldBuilder().getBuilder(index);
}
/**
 *
 *
 * <pre>
 * The databases from the specified catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
 */
public com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder getDatabasesOrBuilder(int index) {
  if (databasesBuilder_ == null) {
    return databases_.get(index);
  } else {
    return databasesBuilder_.getMessageOrBuilder(index);
  }
}
/**
 *
 *
 * <pre>
 * The databases from the specified catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
 */
public java.util.List<? extends com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder>
    getDatabasesOrBuilderList() {
  if (databasesBuilder_ != null) {
    return databasesBuilder_.getMessageOrBuilderList();
  } else {
    // Read-only view: callers must not mutate the returned list.
    return java.util.Collections.unmodifiableList(databases_);
  }
}
/**
 *
 *
 * <pre>
 * The databases from the specified catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
 */
public com.google.cloud.bigquery.biglake.v1.Database.Builder addDatabasesBuilder() {
  return getDatabasesFieldBuilder()
      .addBuilder(com.google.cloud.bigquery.biglake.v1.Database.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * The databases from the specified catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
 */
public com.google.cloud.bigquery.biglake.v1.Database.Builder addDatabasesBuilder(int index) {
  return getDatabasesFieldBuilder()
      .addBuilder(index, com.google.cloud.bigquery.biglake.v1.Database.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * The databases from the specified catalog.
 * </pre>
 *
 * <code>repeated .google.cloud.bigquery.biglake.v1.Database databases = 1;</code>
 */
public java.util.List<com.google.cloud.bigquery.biglake.v1.Database.Builder>
    getDatabasesBuilderList() {
  return getDatabasesFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.bigquery.biglake.v1.Database,
        com.google.cloud.bigquery.biglake.v1.Database.Builder,
        com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder>
    getDatabasesFieldBuilder() {
  // Lazily created on first builder-view access; ownership of the current list is handed to
  // the field builder and databases_ is nulled so the two representations never diverge.
  if (databasesBuilder_ == null) {
    databasesBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.bigquery.biglake.v1.Database,
            com.google.cloud.bigquery.biglake.v1.Database.Builder,
            com.google.cloud.bigquery.biglake.v1.DatabaseOrBuilder>(
            databases_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
    databases_ = null;
  }
  return databasesBuilder_;
}
// Stored as Object so it can hold either a String or a ByteString; the getters lazily
// convert and cache whichever representation was requested (standard generated pattern).
private java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof java.lang.String)) {
    // Field currently held as ByteString (came off the wire): decode once and cache.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    // Field currently held as String: encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  // Mark field 2 as explicitly set so buildPartial copies it into the message.
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
 *
 *
 * <pre>
 * A token, which can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @param value The bytes for nextPageToken to set.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  // proto3 strings must be valid UTF-8; reject invalid bytes up front.
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.biglake.v1.ListDatabasesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.biglake.v1.ListDatabasesResponse)
// Shared immutable default instance; also serves as the type's "empty" sentinel in merges.
private static final com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse();
}
public static com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Parser delegates to the Builder; on any failure it attaches the partially built message so
// callers can inspect what was decoded before the error.
private static final com.google.protobuf.Parser<ListDatabasesResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListDatabasesResponse>() {
      @java.lang.Override
      public ListDatabasesResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<ListDatabasesResponse> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListDatabasesResponse> getParserForType() {
  return PARSER;
}
@java.lang.Override
public com.google.cloud.bigquery.biglake.v1.ListDatabasesResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
googleads/google-ads-java | 37,076 | google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/services/ListBatchJobResultsResponse.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v19/services/batch_job_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v19.services;
/**
* <pre>
* Response message for
* [BatchJobService.ListBatchJobResults][google.ads.googleads.v19.services.BatchJobService.ListBatchJobResults].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.ListBatchJobResultsResponse}
*/
public final class ListBatchJobResultsResponse extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v19.services.ListBatchJobResultsResponse)
ListBatchJobResultsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListBatchJobResultsResponse.newBuilder() to construct.
private ListBatchJobResultsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}
// No-arg ctor used only for the default instance; initializes fields to proto3 defaults.
private ListBatchJobResultsResponse() {
  results_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
    UnusedPrivateParameter unused) {
  return new ListBatchJobResultsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.ads.googleads.v19.services.BatchJobServiceProto.internal_static_google_ads_googleads_v19_services_ListBatchJobResultsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.ads.googleads.v19.services.BatchJobServiceProto.internal_static_google_ads_googleads_v19_services_ListBatchJobResultsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.ads.googleads.v19.services.ListBatchJobResultsResponse.class, com.google.ads.googleads.v19.services.ListBatchJobResultsResponse.Builder.class);
}
public static final int RESULTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
// Immutable once the message is built; accessors below expose it directly without copying.
private java.util.List<com.google.ads.googleads.v19.services.BatchJobResult> results_;
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.ads.googleads.v19.services.BatchJobResult> getResultsList() {
  return results_;
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v19.services.BatchJobResultOrBuilder>
    getResultsOrBuilderList() {
  return results_;
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
@java.lang.Override
public int getResultsCount() {
  return results_.size();
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
@java.lang.Override
public com.google.ads.googleads.v19.services.BatchJobResult getResults(int index) {
  return results_.get(index);
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
@java.lang.Override
public com.google.ads.googleads.v19.services.BatchJobResultOrBuilder getResultsOrBuilder(
    int index) {
  return results_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
// Object so it can hold String or ByteString; getters lazily convert and cache (volatile
// makes the benign-race caching safe across threads).
private volatile java.lang.Object nextPageToken_ = "";
/**
 * <pre>
 * Pagination token used to retrieve the next page of results.
 * Pass the content of this string as the `page_token` attribute of
 * the next request. `next_page_token` is not returned for the last
 * page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs =
        (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}
/**
 * <pre>
 * Pagination token used to retrieve the next page of results.
 * Pass the content of this string as the `page_token` attribute of
 * the next request. `next_page_token` is not returned for the last
 * page.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString
    getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8(
            (java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized. This message has no
// required fields, so the check trivially succeeds and is cached.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
    throws java.io.IOException {
  for (int i = 0; i < results_.size(); i++) {
    output.writeMessage(1, results_.get(i));
  }
  // proto3: empty string is the default and is not serialized.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Size is memoized; -1 means "not computed yet". Safe because the message is immutable.
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  for (int i = 0; i < results_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream
        .computeMessageSize(1, results_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.ads.googleads.v19.services.ListBatchJobResultsResponse)) {
    return super.equals(obj);
  }
  com.google.ads.googleads.v19.services.ListBatchJobResultsResponse other = (com.google.ads.googleads.v19.services.ListBatchJobResultsResponse) obj;
  if (!getResultsList()
      .equals(other.getResultsList())) return false;
  if (!getNextPageToken()
      .equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized; 0 doubles as "not computed" (generated contract accepts the rare recompute).
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (getResultsCount() > 0) {
    hash = (37 * hash) + RESULTS_FIELD_NUMBER;
    hash = (53 * hash) + getResultsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: one overload per input source (ByteBuffer,
// ByteString, byte[], InputStream, CodedInputStream), each with and without an extension
// registry; delimited variants read a varint length prefix first.
public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse parseFrom(
    java.nio.ByteBuffer data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse parseFrom(
    java.nio.ByteBuffer data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse parseFrom(
    byte[] data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse parseFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse parseFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse parseDelimitedFrom(java.io.InputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse parseDelimitedFrom(
    java.io.InputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse parseFrom(
    com.google.protobuf.CodedInputStream input)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3
      .parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v19.services.ListBatchJobResultsResponse prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Default instance yields a fresh empty Builder; anything else is merged in field by field.
  return this == DEFAULT_INSTANCE
      ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
* <pre>
* Response message for
* [BatchJobService.ListBatchJobResults][google.ads.googleads.v19.services.BatchJobService.ListBatchJobResults].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v19.services.ListBatchJobResultsResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.services.ListBatchJobResultsResponse)
com.google.ads.googleads.v19.services.ListBatchJobResultsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
    getDescriptor() {
  return com.google.ads.googleads.v19.services.BatchJobServiceProto.internal_static_google_ads_googleads_v19_services_ListBatchJobResultsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.ads.googleads.v19.services.BatchJobServiceProto.internal_static_google_ads_googleads_v19_services_ListBatchJobResultsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.ads.googleads.v19.services.ListBatchJobResultsResponse.class, com.google.ads.googleads.v19.services.ListBatchJobResultsResponse.Builder.class);
}
// Construct using com.google.ads.googleads.v19.services.ListBatchJobResultsResponse.newBuilder()
private Builder() {
}
private Builder(
    com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  super(parent);
}
@java.lang.Override
public Builder clear() {
  super.clear();
  bitField0_ = 0;
  if (resultsBuilder_ == null) {
    results_ = java.util.Collections.emptyList();
  } else {
    // Field builder owns the data once created; clear through it and drop the local list.
    results_ = null;
    resultsBuilder_.clear();
  }
  bitField0_ = (bitField0_ & ~0x00000001);
  nextPageToken_ = "";
  return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
    getDescriptorForType() {
  return com.google.ads.googleads.v19.services.BatchJobServiceProto.internal_static_google_ads_googleads_v19_services_ListBatchJobResultsResponse_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.ListBatchJobResultsResponse getDefaultInstanceForType() {
  return com.google.ads.googleads.v19.services.ListBatchJobResultsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v19.services.ListBatchJobResultsResponse build() {
  com.google.ads.googleads.v19.services.ListBatchJobResultsResponse result = buildPartial();
  if (!result.isInitialized()) {
    throw newUninitializedMessageException(result);
  }
  return result;
}
@java.lang.Override
public com.google.ads.googleads.v19.services.ListBatchJobResultsResponse buildPartial() {
  com.google.ads.googleads.v19.services.ListBatchJobResultsResponse result = new com.google.ads.googleads.v19.services.ListBatchJobResultsResponse(this);
  buildPartialRepeatedFields(result);
  if (bitField0_ != 0) { buildPartial0(result); }
  onBuilt();
  return result;
}
// Repeated fields are handled separately: the list is frozen (made unmodifiable) on first
// build and then shared with the built message to avoid copying.
private void buildPartialRepeatedFields(com.google.ads.googleads.v19.services.ListBatchJobResultsResponse result) {
  if (resultsBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      results_ = java.util.Collections.unmodifiableList(results_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.results_ = results_;
  } else {
    result.results_ = resultsBuilder_.build();
  }
}
// Copies only the scalar fields whose "set" bit is on.
private void buildPartial0(com.google.ads.googleads.v19.services.ListBatchJobResultsResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
// The next six overrides exist only to narrow the return type to this Builder; they
// delegate unchanged to GeneratedMessageV3.Builder.
@java.lang.Override
public Builder clone() {
  return super.clone();
}
@java.lang.Override
public Builder setField(
    com.google.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
    com.google.protobuf.Descriptors.FieldDescriptor field) {
  return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
    com.google.protobuf.Descriptors.OneofDescriptor oneof) {
  return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field,
    int index, java.lang.Object value) {
  return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
    com.google.protobuf.Descriptors.FieldDescriptor field,
    java.lang.Object value) {
  return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
  if (other instanceof com.google.ads.googleads.v19.services.ListBatchJobResultsResponse) {
    return mergeFrom((com.google.ads.googleads.v19.services.ListBatchJobResultsResponse)other);
  } else {
    super.mergeFrom(other);
    return this;
  }
}
public Builder mergeFrom(com.google.ads.googleads.v19.services.ListBatchJobResultsResponse other) {
  if (other == com.google.ads.googleads.v19.services.ListBatchJobResultsResponse.getDefaultInstance()) return this;
  if (resultsBuilder_ == null) {
    if (!other.results_.isEmpty()) {
      if (results_.isEmpty()) {
        // Our list is empty: share the other message's immutable list instead of copying.
        results_ = other.results_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureResultsIsMutable();
        results_.addAll(other.results_);
      }
      onChanged();
    }
  } else {
    if (!other.results_.isEmpty()) {
      if (resultsBuilder_.isEmpty()) {
        // Empty field builder: dispose it and re-seed from the shared list (recreated lazily
        // only if alwaysUseFieldBuilders is on).
        resultsBuilder_.dispose();
        resultsBuilder_ = null;
        results_ = other.results_;
        bitField0_ = (bitField0_ & ~0x00000001);
        resultsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                getResultsFieldBuilder() : null;
      } else {
        resultsBuilder_.addAllMessages(other.results_);
      }
    }
  }
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
  return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    // Wire-format dispatch loop: tag = (field_number << 3) | wire_type, so 10 is
    // field 1/length-delimited (results) and 18 is field 2/length-delimited (token).
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10: {
          com.google.ads.googleads.v19.services.BatchJobResult m =
              input.readMessage(
                  com.google.ads.googleads.v19.services.BatchJobResult.parser(),
                  extensionRegistry);
          if (resultsBuilder_ == null) {
            ensureResultsIsMutable();
            results_.add(m);
          } else {
            resultsBuilder_.addMessage(m);
          }
          break;
        } // case 10
        case 18: {
          nextPageToken_ = input.readStringRequireUtf8();
          bitField0_ |= 0x00000002;
          break;
        } // case 18
        default: {
          // Unknown fields are preserved for round-tripping rather than dropped.
          if (!super.parseUnknownField(input, extensionRegistry, tag)) {
            done = true; // was an endgroup tag
          }
          break;
        } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify the parent even on failure so partially merged state is observed.
    onChanged();
  } // finally
  return this;
}
// Bit 0x1 = results list is locally owned/mutable; bit 0x2 = nextPageToken explicitly set.
private int bitField0_;
private java.util.List<com.google.ads.googleads.v19.services.BatchJobResult> results_ =
    java.util.Collections.emptyList();
// Copy-on-first-write: the list may be shared with a built message or another Builder,
// so it is defensively copied before the first local mutation.
private void ensureResultsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    results_ = new java.util.ArrayList<com.google.ads.googleads.v19.services.BatchJobResult>(results_);
    bitField0_ |= 0x00000001;
  }
}
// Created lazily by getResultsFieldBuilder(); once non-null it owns the repeated field.
private com.google.protobuf.RepeatedFieldBuilderV3<
    com.google.ads.googleads.v19.services.BatchJobResult, com.google.ads.googleads.v19.services.BatchJobResult.Builder, com.google.ads.googleads.v19.services.BatchJobResultOrBuilder> resultsBuilder_;
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
// Generated repeated-field accessors/mutators: each reads or writes the local results_
// list while resultsBuilder_ is null, and delegates to the field builder otherwise.
public java.util.List<com.google.ads.googleads.v19.services.BatchJobResult> getResultsList() {
  if (resultsBuilder_ == null) {
    return java.util.Collections.unmodifiableList(results_);
  } else {
    return resultsBuilder_.getMessageList();
  }
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public int getResultsCount() {
  if (resultsBuilder_ == null) {
    return results_.size();
  } else {
    return resultsBuilder_.getCount();
  }
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public com.google.ads.googleads.v19.services.BatchJobResult getResults(int index) {
  if (resultsBuilder_ == null) {
    return results_.get(index);
  } else {
    return resultsBuilder_.getMessage(index);
  }
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public Builder setResults(
    int index, com.google.ads.googleads.v19.services.BatchJobResult value) {
  if (resultsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureResultsIsMutable();
    results_.set(index, value);
    onChanged();
  } else {
    resultsBuilder_.setMessage(index, value);
  }
  return this;
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public Builder setResults(
    int index, com.google.ads.googleads.v19.services.BatchJobResult.Builder builderForValue) {
  if (resultsBuilder_ == null) {
    ensureResultsIsMutable();
    results_.set(index, builderForValue.build());
    onChanged();
  } else {
    resultsBuilder_.setMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public Builder addResults(com.google.ads.googleads.v19.services.BatchJobResult value) {
  if (resultsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureResultsIsMutable();
    results_.add(value);
    onChanged();
  } else {
    resultsBuilder_.addMessage(value);
  }
  return this;
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public Builder addResults(
    int index, com.google.ads.googleads.v19.services.BatchJobResult value) {
  if (resultsBuilder_ == null) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureResultsIsMutable();
    results_.add(index, value);
    onChanged();
  } else {
    resultsBuilder_.addMessage(index, value);
  }
  return this;
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public Builder addResults(
    com.google.ads.googleads.v19.services.BatchJobResult.Builder builderForValue) {
  if (resultsBuilder_ == null) {
    ensureResultsIsMutable();
    results_.add(builderForValue.build());
    onChanged();
  } else {
    resultsBuilder_.addMessage(builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public Builder addResults(
    int index, com.google.ads.googleads.v19.services.BatchJobResult.Builder builderForValue) {
  if (resultsBuilder_ == null) {
    ensureResultsIsMutable();
    results_.add(index, builderForValue.build());
    onChanged();
  } else {
    resultsBuilder_.addMessage(index, builderForValue.build());
  }
  return this;
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public Builder addAllResults(
    java.lang.Iterable<? extends com.google.ads.googleads.v19.services.BatchJobResult> values) {
  if (resultsBuilder_ == null) {
    ensureResultsIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(
        values, results_);
    onChanged();
  } else {
    resultsBuilder_.addAllMessages(values);
  }
  return this;
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public Builder clearResults() {
  if (resultsBuilder_ == null) {
    results_ = java.util.Collections.emptyList();
    bitField0_ = (bitField0_ & ~0x00000001);
    onChanged();
  } else {
    resultsBuilder_.clear();
  }
  return this;
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public Builder removeResults(int index) {
  if (resultsBuilder_ == null) {
    ensureResultsIsMutable();
    results_.remove(index);
    onChanged();
  } else {
    resultsBuilder_.remove(index);
  }
  return this;
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
// Builder-view accessors: getResultsBuilder/addResultsBuilder force creation of the
// RepeatedFieldBuilderV3, permanently switching this Builder off the plain-list path.
public com.google.ads.googleads.v19.services.BatchJobResult.Builder getResultsBuilder(
    int index) {
  return getResultsFieldBuilder().getBuilder(index);
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public com.google.ads.googleads.v19.services.BatchJobResultOrBuilder getResultsOrBuilder(
    int index) {
  if (resultsBuilder_ == null) {
    return results_.get(index); } else {
    return resultsBuilder_.getMessageOrBuilder(index);
  }
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public java.util.List<? extends com.google.ads.googleads.v19.services.BatchJobResultOrBuilder>
    getResultsOrBuilderList() {
  if (resultsBuilder_ != null) {
    return resultsBuilder_.getMessageOrBuilderList();
  } else {
    return java.util.Collections.unmodifiableList(results_);
  }
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public com.google.ads.googleads.v19.services.BatchJobResult.Builder addResultsBuilder() {
  return getResultsFieldBuilder().addBuilder(
      com.google.ads.googleads.v19.services.BatchJobResult.getDefaultInstance());
}
/**
 * <pre>
 * The list of rows that matched the query.
 * </pre>
 *
 * <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
 */
public com.google.ads.googleads.v19.services.BatchJobResult.Builder addResultsBuilder(
    int index) {
  return getResultsFieldBuilder().addBuilder(
      index, com.google.ads.googleads.v19.services.BatchJobResult.getDefaultInstance());
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v19.services.BatchJobResult results = 1;</code>
*/
public java.util.List<com.google.ads.googleads.v19.services.BatchJobResult.Builder>
getResultsBuilderList() {
return getResultsFieldBuilder().getBuilderList();
}
    // Lazily creates the RepeatedFieldBuilderV3 that takes over ownership of the
    // results list; after creation, results_ is nulled out and all further access
    // goes through resultsBuilder_.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v19.services.BatchJobResult, com.google.ads.googleads.v19.services.BatchJobResult.Builder, com.google.ads.googleads.v19.services.BatchJobResultOrBuilder>
        getResultsFieldBuilder() {
      if (resultsBuilder_ == null) {
        resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.ads.googleads.v19.services.BatchJobResult, com.google.ads.googleads.v19.services.BatchJobResult.Builder, com.google.ads.googleads.v19.services.BatchJobResultOrBuilder>(
                results_,
                ((bitField0_ & 0x00000001) != 0),  // true only if results_ is our own mutable copy
                getParentForChildren(),
                isClean());
        results_ = null;  // the field builder owns the elements from here on
      }
      return resultsBuilder_;
    }
    // Declared as Object: holds either a String or a ByteString, decoded lazily below.
    private java.lang.Object nextPageToken_ = "";
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        // Wire form is a ByteString; decode once and cache the String back into the field.
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString
getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;  // bit 0x00000002 records that next_page_token was explicitly set
      onChanged();
      return this;
    }
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
    // Unknown-field handling is delegated unchanged to the generated-message base class.
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.services.ListBatchJobResultsResponse)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v19.services.ListBatchJobResultsResponse)
  // Shared immutable default instance, created eagerly at class-load time.
  private static final com.google.ads.googleads.v19.services.ListBatchJobResultsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v19.services.ListBatchJobResultsResponse();
  }
  public static com.google.ads.googleads.v19.services.ListBatchJobResultsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by the protobuf runtime; parses by merging into a fresh Builder.
  private static final com.google.protobuf.Parser<ListBatchJobResultsResponse>
      PARSER = new com.google.protobuf.AbstractParser<ListBatchJobResultsResponse>() {
    @java.lang.Override
    public ListBatchJobResultsResponse parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        // Attach whatever was parsed so far so callers can inspect the partial message.
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        // Wrap plain I/O failures in the protobuf-specific exception type.
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<ListBatchJobResultsResponse> parser() {
    return PARSER;
  }
  // Both accessors return the shared static singletons declared above.
  @java.lang.Override
  public com.google.protobuf.Parser<ListBatchJobResultsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v19.services.ListBatchJobResultsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
// ==== End of generated v19 file. The following is a separate generated source:
// google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/services/ListBatchJobResultsResponse.java ====
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v20/services/batch_job_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v20.services;
/**
* <pre>
* Response message for
* [BatchJobService.ListBatchJobResults][google.ads.googleads.v20.services.BatchJobService.ListBatchJobResults].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.ListBatchJobResultsResponse}
*/
public final class ListBatchJobResultsResponse extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v20.services.ListBatchJobResultsResponse)
ListBatchJobResultsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListBatchJobResultsResponse.newBuilder() to construct.
private ListBatchJobResultsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListBatchJobResultsResponse() {
results_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ListBatchJobResultsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.BatchJobServiceProto.internal_static_google_ads_googleads_v20_services_ListBatchJobResultsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.BatchJobServiceProto.internal_static_google_ads_googleads_v20_services_ListBatchJobResultsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.ListBatchJobResultsResponse.class, com.google.ads.googleads.v20.services.ListBatchJobResultsResponse.Builder.class);
}
public static final int RESULTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.ads.googleads.v20.services.BatchJobResult> results_;
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.ads.googleads.v20.services.BatchJobResult> getResultsList() {
return results_;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v20.services.BatchJobResultOrBuilder>
getResultsOrBuilderList() {
return results_;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
@java.lang.Override
public int getResultsCount() {
return results_.size();
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v20.services.BatchJobResult getResults(int index) {
return results_.get(index);
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v20.services.BatchJobResultOrBuilder getResultsOrBuilder(
int index) {
return results_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
  // Memoized isInitialized result: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in this message, so it is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output)
      throws java.io.IOException {
    // Field 1: repeated BatchJobResult results.
    for (int i = 0; i < results_.size(); i++) {
      output.writeMessage(1, results_.get(i));
    }
    // Field 2: next_page_token — skipped when empty, per proto3 default-value rules.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;  // cached from a previous call
    size = 0;
    for (int i = 0; i < results_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream
          .computeMessageSize(1, results_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;  // memoize; safe because the built message is immutable
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.ads.googleads.v20.services.ListBatchJobResultsResponse)) {
      return super.equals(obj);
    }
    com.google.ads.googleads.v20.services.ListBatchJobResultsResponse other = (com.google.ads.googleads.v20.services.ListBatchJobResultsResponse) obj;
    // Field-by-field comparison; unknown fields participate in equality as well.
    if (!getResultsList()
        .equals(other.getResultsList())) return false;
    if (!getNextPageToken()
        .equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;  // 0 doubles as the "not yet computed" sentinel
    }
    // Standard protoc hash recipe: mix in the descriptor, then each populated
    // field tagged by its field number, then the unknown fields.
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getResultsCount() > 0) {
      hash = (37 * hash) + RESULTS_FIELD_NUMBER;
      hash = (53 * hash) + getResultsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v20.services.ListBatchJobResultsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
  @java.lang.Override
  public Builder toBuilder() {
    // Avoid a wasted mergeFrom when starting from the shared default instance.
    return this == DEFAULT_INSTANCE
        ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(
      com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    // Parented builder: change notifications propagate to the enclosing builder.
    Builder builder = new Builder(parent);
    return builder;
  }
/**
* <pre>
* Response message for
* [BatchJobService.ListBatchJobResults][google.ads.googleads.v20.services.BatchJobService.ListBatchJobResults].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v20.services.ListBatchJobResultsResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.services.ListBatchJobResultsResponse)
com.google.ads.googleads.v20.services.ListBatchJobResultsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v20.services.BatchJobServiceProto.internal_static_google_ads_googleads_v20_services_ListBatchJobResultsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v20.services.BatchJobServiceProto.internal_static_google_ads_googleads_v20_services_ListBatchJobResultsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v20.services.ListBatchJobResultsResponse.class, com.google.ads.googleads.v20.services.ListBatchJobResultsResponse.Builder.class);
}
// Construct using com.google.ads.googleads.v20.services.ListBatchJobResultsResponse.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (resultsBuilder_ == null) {
results_ = java.util.Collections.emptyList();
} else {
results_ = null;
resultsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v20.services.BatchJobServiceProto.internal_static_google_ads_googleads_v20_services_ListBatchJobResultsResponse_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListBatchJobResultsResponse getDefaultInstanceForType() {
return com.google.ads.googleads.v20.services.ListBatchJobResultsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListBatchJobResultsResponse build() {
com.google.ads.googleads.v20.services.ListBatchJobResultsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListBatchJobResultsResponse buildPartial() {
com.google.ads.googleads.v20.services.ListBatchJobResultsResponse result = new com.google.ads.googleads.v20.services.ListBatchJobResultsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
    // Transfers the repeated `results` field into the message being built.
    private void buildPartialRepeatedFields(com.google.ads.googleads.v20.services.ListBatchJobResultsResponse result) {
      if (resultsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          // Freeze our private mutable copy before handing it to the message.
          results_ = java.util.Collections.unmodifiableList(results_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.results_ = results_;
      } else {
        result.results_ = resultsBuilder_.build();
      }
    }
    // Transfers scalar fields guarded by presence bits.
    private void buildPartial0(com.google.ads.googleads.v20.services.ListBatchJobResultsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        // Copy next_page_token only when it was explicitly set on this builder.
        result.nextPageToken_ = nextPageToken_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v20.services.ListBatchJobResultsResponse) {
return mergeFrom((com.google.ads.googleads.v20.services.ListBatchJobResultsResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
    // Merges another ListBatchJobResultsResponse into this builder:
    // repeated `results` are concatenated, a non-empty next_page_token overwrites ours.
    public Builder mergeFrom(com.google.ads.googleads.v20.services.ListBatchJobResultsResponse other) {
      if (other == com.google.ads.googleads.v20.services.ListBatchJobResultsResponse.getDefaultInstance()) return this;
      if (resultsBuilder_ == null) {
        // Plain-list mode: share other's (immutable) list when ours is empty,
        // otherwise append after making our copy mutable.
        if (!other.results_.isEmpty()) {
          if (results_.isEmpty()) {
            results_ = other.results_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureResultsIsMutable();
            results_.addAll(other.results_);
          }
          onChanged();
        }
      } else {
        // Field-builder mode: if our builder holds nothing, discard it and adopt
        // other's list directly (re-creating the builder only when the runtime
        // forces builder usage); otherwise append through the builder.
        if (!other.results_.isEmpty()) {
          if (resultsBuilder_.isEmpty()) {
            resultsBuilder_.dispose();
            resultsBuilder_ = null;
            results_ = other.results_;
            bitField0_ = (bitField0_ & ~0x00000001);
            resultsBuilder_ =
              com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
                 getResultsFieldBuilder() : null;
          } else {
            resultsBuilder_.addAllMessages(other.results_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
com.google.ads.googleads.v20.services.BatchJobResult m =
input.readMessage(
com.google.ads.googleads.v20.services.BatchJobResult.parser(),
extensionRegistry);
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(m);
} else {
resultsBuilder_.addMessage(m);
}
break;
} // case 10
case 18: {
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
    // Backing list for the repeated `results` field while no field builder exists.
    private java.util.List<com.google.ads.googleads.v20.services.BatchJobResult> results_ =
      java.util.Collections.emptyList();
    private void ensureResultsIsMutable() {
      // Copy-on-write: bit 0x00000001 records that results_ is already our own ArrayList.
      if (!((bitField0_ & 0x00000001) != 0)) {
        results_ = new java.util.ArrayList<com.google.ads.googleads.v20.services.BatchJobResult>(results_);
        bitField0_ |= 0x00000001;
      }
    }
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v20.services.BatchJobResult, com.google.ads.googleads.v20.services.BatchJobResult.Builder, com.google.ads.googleads.v20.services.BatchJobResultOrBuilder> resultsBuilder_;
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public java.util.List<com.google.ads.googleads.v20.services.BatchJobResult> getResultsList() {
if (resultsBuilder_ == null) {
return java.util.Collections.unmodifiableList(results_);
} else {
return resultsBuilder_.getMessageList();
}
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public int getResultsCount() {
if (resultsBuilder_ == null) {
return results_.size();
} else {
return resultsBuilder_.getCount();
}
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public com.google.ads.googleads.v20.services.BatchJobResult getResults(int index) {
if (resultsBuilder_ == null) {
return results_.get(index);
} else {
return resultsBuilder_.getMessage(index);
}
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public Builder setResults(
int index, com.google.ads.googleads.v20.services.BatchJobResult value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.set(index, value);
onChanged();
} else {
resultsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public Builder setResults(
int index, com.google.ads.googleads.v20.services.BatchJobResult.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.set(index, builderForValue.build());
onChanged();
} else {
resultsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public Builder addResults(com.google.ads.googleads.v20.services.BatchJobResult value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.add(value);
onChanged();
} else {
resultsBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public Builder addResults(
int index, com.google.ads.googleads.v20.services.BatchJobResult value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.add(index, value);
onChanged();
} else {
resultsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public Builder addResults(
com.google.ads.googleads.v20.services.BatchJobResult.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(builderForValue.build());
onChanged();
} else {
resultsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public Builder addResults(
int index, com.google.ads.googleads.v20.services.BatchJobResult.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(index, builderForValue.build());
onChanged();
} else {
resultsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public Builder addAllResults(
java.lang.Iterable<? extends com.google.ads.googleads.v20.services.BatchJobResult> values) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(
values, results_);
onChanged();
} else {
resultsBuilder_.addAllMessages(values);
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public Builder clearResults() {
if (resultsBuilder_ == null) {
results_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
resultsBuilder_.clear();
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public Builder removeResults(int index) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.remove(index);
onChanged();
} else {
resultsBuilder_.remove(index);
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public com.google.ads.googleads.v20.services.BatchJobResult.Builder getResultsBuilder(
int index) {
return getResultsFieldBuilder().getBuilder(index);
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public com.google.ads.googleads.v20.services.BatchJobResultOrBuilder getResultsOrBuilder(
int index) {
if (resultsBuilder_ == null) {
return results_.get(index); } else {
return resultsBuilder_.getMessageOrBuilder(index);
}
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public java.util.List<? extends com.google.ads.googleads.v20.services.BatchJobResultOrBuilder>
getResultsOrBuilderList() {
if (resultsBuilder_ != null) {
return resultsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(results_);
}
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public com.google.ads.googleads.v20.services.BatchJobResult.Builder addResultsBuilder() {
return getResultsFieldBuilder().addBuilder(
com.google.ads.googleads.v20.services.BatchJobResult.getDefaultInstance());
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public com.google.ads.googleads.v20.services.BatchJobResult.Builder addResultsBuilder(
int index) {
return getResultsFieldBuilder().addBuilder(
index, com.google.ads.googleads.v20.services.BatchJobResult.getDefaultInstance());
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v20.services.BatchJobResult results = 1;</code>
*/
public java.util.List<com.google.ads.googleads.v20.services.BatchJobResult.Builder>
getResultsBuilderList() {
return getResultsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v20.services.BatchJobResult, com.google.ads.googleads.v20.services.BatchJobResult.Builder, com.google.ads.googleads.v20.services.BatchJobResultOrBuilder>
getResultsFieldBuilder() {
if (resultsBuilder_ == null) {
resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v20.services.BatchJobResult, com.google.ads.googleads.v20.services.BatchJobResult.Builder, com.google.ads.googleads.v20.services.BatchJobResultOrBuilder>(
results_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
results_ = null;
}
return resultsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString
getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(
java.lang.String value) {
if (value == null) { throw new NullPointerException(); }
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(
com.google.protobuf.ByteString value) {
if (value == null) { throw new NullPointerException(); }
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.services.ListBatchJobResultsResponse)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v20.services.ListBatchJobResultsResponse)
private static final com.google.ads.googleads.v20.services.ListBatchJobResultsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.googleads.v20.services.ListBatchJobResultsResponse();
}
public static com.google.ads.googleads.v20.services.ListBatchJobResultsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListBatchJobResultsResponse>
PARSER = new com.google.protobuf.AbstractParser<ListBatchJobResultsResponse>() {
@java.lang.Override
public ListBatchJobResultsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListBatchJobResultsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListBatchJobResultsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.googleads.v20.services.ListBatchJobResultsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleads/google-ads-java | 37,076 | google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/services/ListBatchJobResultsResponse.java | // Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/googleads/v21/services/batch_job_service.proto
// Protobuf Java Version: 3.25.7
package com.google.ads.googleads.v21.services;
/**
* <pre>
* Response message for
* [BatchJobService.ListBatchJobResults][google.ads.googleads.v21.services.BatchJobService.ListBatchJobResults].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ListBatchJobResultsResponse}
*/
public final class ListBatchJobResultsResponse extends
com.google.protobuf.GeneratedMessageV3 implements
// @@protoc_insertion_point(message_implements:google.ads.googleads.v21.services.ListBatchJobResultsResponse)
ListBatchJobResultsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListBatchJobResultsResponse.newBuilder() to construct.
private ListBatchJobResultsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListBatchJobResultsResponse() {
results_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(
UnusedPrivateParameter unused) {
return new ListBatchJobResultsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.BatchJobServiceProto.internal_static_google_ads_googleads_v21_services_ListBatchJobResultsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.BatchJobServiceProto.internal_static_google_ads_googleads_v21_services_ListBatchJobResultsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.ListBatchJobResultsResponse.class, com.google.ads.googleads.v21.services.ListBatchJobResultsResponse.Builder.class);
}
public static final int RESULTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.ads.googleads.v21.services.BatchJobResult> results_;
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.ads.googleads.v21.services.BatchJobResult> getResultsList() {
return results_;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.ads.googleads.v21.services.BatchJobResultOrBuilder>
getResultsOrBuilderList() {
return results_;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
@java.lang.Override
public int getResultsCount() {
return results_.size();
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v21.services.BatchJobResult getResults(int index) {
return results_.get(index);
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
@java.lang.Override
public com.google.ads.googleads.v21.services.BatchJobResultOrBuilder getResultsOrBuilder(
int index) {
return results_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs =
(com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
* <pre>
* Pagination token used to retrieve the next page of results.
* Pass the content of this string as the `page_token` attribute of
* the next request. `next_page_token` is not returned for the last
* page.
* </pre>
*
* <code>string next_page_token = 2;</code>
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString
getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8(
(java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output)
throws java.io.IOException {
for (int i = 0; i < results_.size(); i++) {
output.writeMessage(1, results_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < results_.size(); i++) {
size += com.google.protobuf.CodedOutputStream
.computeMessageSize(1, results_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.ads.googleads.v21.services.ListBatchJobResultsResponse)) {
return super.equals(obj);
}
com.google.ads.googleads.v21.services.ListBatchJobResultsResponse other = (com.google.ads.googleads.v21.services.ListBatchJobResultsResponse) obj;
if (!getResultsList()
.equals(other.getResultsList())) return false;
if (!getNextPageToken()
.equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getResultsCount() > 0) {
hash = (37 * hash) + RESULTS_FIELD_NUMBER;
hash = (53 * hash) + getResultsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse parseFrom(
java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse parseFrom(
java.nio.ByteBuffer data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse parseFrom(
byte[] data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse parseFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse parseDelimitedFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse parseDelimitedFrom(
java.io.InputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseDelimitedWithIOException(PARSER, input, extensionRegistry);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse parseFrom(
com.google.protobuf.CodedInputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input);
}
public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3
.parseWithIOException(PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() { return newBuilder(); }
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.ads.googleads.v21.services.ListBatchJobResultsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE
? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
* <pre>
* Response message for
* [BatchJobService.ListBatchJobResults][google.ads.googleads.v21.services.BatchJobService.ListBatchJobResults].
* </pre>
*
* Protobuf type {@code google.ads.googleads.v21.services.ListBatchJobResultsResponse}
*/
public static final class Builder extends
com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements
// @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.services.ListBatchJobResultsResponse)
com.google.ads.googleads.v21.services.ListBatchJobResultsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor
getDescriptor() {
return com.google.ads.googleads.v21.services.BatchJobServiceProto.internal_static_google_ads_googleads_v21_services_ListBatchJobResultsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.googleads.v21.services.BatchJobServiceProto.internal_static_google_ads_googleads_v21_services_ListBatchJobResultsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.googleads.v21.services.ListBatchJobResultsResponse.class, com.google.ads.googleads.v21.services.ListBatchJobResultsResponse.Builder.class);
}
// Construct using com.google.ads.googleads.v21.services.ListBatchJobResultsResponse.newBuilder()
private Builder() {
}
private Builder(
com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (resultsBuilder_ == null) {
results_ = java.util.Collections.emptyList();
} else {
results_ = null;
resultsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor
getDescriptorForType() {
return com.google.ads.googleads.v21.services.BatchJobServiceProto.internal_static_google_ads_googleads_v21_services_ListBatchJobResultsResponse_descriptor;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListBatchJobResultsResponse getDefaultInstanceForType() {
return com.google.ads.googleads.v21.services.ListBatchJobResultsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListBatchJobResultsResponse build() {
com.google.ads.googleads.v21.services.ListBatchJobResultsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.googleads.v21.services.ListBatchJobResultsResponse buildPartial() {
com.google.ads.googleads.v21.services.ListBatchJobResultsResponse result = new com.google.ads.googleads.v21.services.ListBatchJobResultsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) { buildPartial0(result); }
onBuilt();
return result;
}
private void buildPartialRepeatedFields(com.google.ads.googleads.v21.services.ListBatchJobResultsResponse result) {
if (resultsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
results_ = java.util.Collections.unmodifiableList(results_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.results_ = results_;
} else {
result.results_ = resultsBuilder_.build();
}
}
private void buildPartial0(com.google.ads.googleads.v21.services.ListBatchJobResultsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(
com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(
com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field,
java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.googleads.v21.services.ListBatchJobResultsResponse) {
return mergeFrom((com.google.ads.googleads.v21.services.ListBatchJobResultsResponse)other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.googleads.v21.services.ListBatchJobResultsResponse other) {
if (other == com.google.ads.googleads.v21.services.ListBatchJobResultsResponse.getDefaultInstance()) return this;
if (resultsBuilder_ == null) {
if (!other.results_.isEmpty()) {
if (results_.isEmpty()) {
results_ = other.results_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureResultsIsMutable();
results_.addAll(other.results_);
}
onChanged();
}
} else {
if (!other.results_.isEmpty()) {
if (resultsBuilder_.isEmpty()) {
resultsBuilder_.dispose();
resultsBuilder_ = null;
results_ = other.results_;
bitField0_ = (bitField0_ & ~0x00000001);
resultsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ?
getResultsFieldBuilder() : null;
} else {
resultsBuilder_.addAllMessages(other.results_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10: {
com.google.ads.googleads.v21.services.BatchJobResult m =
input.readMessage(
com.google.ads.googleads.v21.services.BatchJobResult.parser(),
extensionRegistry);
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(m);
} else {
resultsBuilder_.addMessage(m);
}
break;
} // case 10
case 18: {
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default: {
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.ads.googleads.v21.services.BatchJobResult> results_ =
java.util.Collections.emptyList();
private void ensureResultsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
results_ = new java.util.ArrayList<com.google.ads.googleads.v21.services.BatchJobResult>(results_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.ads.googleads.v21.services.BatchJobResult, com.google.ads.googleads.v21.services.BatchJobResult.Builder, com.google.ads.googleads.v21.services.BatchJobResultOrBuilder> resultsBuilder_;
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
public java.util.List<com.google.ads.googleads.v21.services.BatchJobResult> getResultsList() {
if (resultsBuilder_ == null) {
return java.util.Collections.unmodifiableList(results_);
} else {
return resultsBuilder_.getMessageList();
}
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
public int getResultsCount() {
if (resultsBuilder_ == null) {
return results_.size();
} else {
return resultsBuilder_.getCount();
}
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
public com.google.ads.googleads.v21.services.BatchJobResult getResults(int index) {
if (resultsBuilder_ == null) {
return results_.get(index);
} else {
return resultsBuilder_.getMessage(index);
}
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
public Builder setResults(
int index, com.google.ads.googleads.v21.services.BatchJobResult value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.set(index, value);
onChanged();
} else {
resultsBuilder_.setMessage(index, value);
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
public Builder setResults(
int index, com.google.ads.googleads.v21.services.BatchJobResult.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.set(index, builderForValue.build());
onChanged();
} else {
resultsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
public Builder addResults(com.google.ads.googleads.v21.services.BatchJobResult value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.add(value);
onChanged();
} else {
resultsBuilder_.addMessage(value);
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
public Builder addResults(
int index, com.google.ads.googleads.v21.services.BatchJobResult value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.add(index, value);
onChanged();
} else {
resultsBuilder_.addMessage(index, value);
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
public Builder addResults(
com.google.ads.googleads.v21.services.BatchJobResult.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(builderForValue.build());
onChanged();
} else {
resultsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
public Builder addResults(
int index, com.google.ads.googleads.v21.services.BatchJobResult.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(index, builderForValue.build());
onChanged();
} else {
resultsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
* <pre>
* The list of rows that matched the query.
* </pre>
*
* <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
*/
    // NOTE(generated code): every repeated-field accessor below is dual-mode.
    // Until getResultsFieldBuilder() is first invoked, elements live directly in
    // the results_ list; afterwards all operations are delegated to resultsBuilder_.
    public Builder addAllResults(
        java.lang.Iterable<? extends com.google.ads.googleads.v21.services.BatchJobResult> values) {
      if (resultsBuilder_ == null) {
        ensureResultsIsMutable();
        // AbstractMessageLite.Builder.addAll null-checks each element while copying.
        com.google.protobuf.AbstractMessageLite.Builder.addAll(
            values, results_);
        onChanged();
      } else {
        resultsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     * <pre>
     * The list of rows that matched the query.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
     */
    public Builder clearResults() {
      if (resultsBuilder_ == null) {
        results_ = java.util.Collections.emptyList();
        // Bit 0x00000001 tracks whether results_ is a private mutable copy.
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        resultsBuilder_.clear();
      }
      return this;
    }
    /**
     * <pre>
     * The list of rows that matched the query.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
     */
    public Builder removeResults(int index) {
      if (resultsBuilder_ == null) {
        ensureResultsIsMutable();
        results_.remove(index);
        onChanged();
      } else {
        resultsBuilder_.remove(index);
      }
      return this;
    }
    /**
     * <pre>
     * The list of rows that matched the query.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
     */
    public com.google.ads.googleads.v21.services.BatchJobResult.Builder getResultsBuilder(
        int index) {
      // Forces the field into builder mode (see getResultsFieldBuilder()).
      return getResultsFieldBuilder().getBuilder(index);
    }
    /**
     * <pre>
     * The list of rows that matched the query.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
     */
    public com.google.ads.googleads.v21.services.BatchJobResultOrBuilder getResultsOrBuilder(
        int index) {
      if (resultsBuilder_ == null) {
        return results_.get(index); } else {
        return resultsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     * <pre>
     * The list of rows that matched the query.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
     */
    public java.util.List<? extends com.google.ads.googleads.v21.services.BatchJobResultOrBuilder>
         getResultsOrBuilderList() {
      if (resultsBuilder_ != null) {
        return resultsBuilder_.getMessageOrBuilderList();
      } else {
        // Read-only view; callers must not mutate the returned list.
        return java.util.Collections.unmodifiableList(results_);
      }
    }
    /**
     * <pre>
     * The list of rows that matched the query.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
     */
    public com.google.ads.googleads.v21.services.BatchJobResult.Builder addResultsBuilder() {
      return getResultsFieldBuilder().addBuilder(
          com.google.ads.googleads.v21.services.BatchJobResult.getDefaultInstance());
    }
    /**
     * <pre>
     * The list of rows that matched the query.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
     */
    public com.google.ads.googleads.v21.services.BatchJobResult.Builder addResultsBuilder(
        int index) {
      return getResultsFieldBuilder().addBuilder(
          index, com.google.ads.googleads.v21.services.BatchJobResult.getDefaultInstance());
    }
    /**
     * <pre>
     * The list of rows that matched the query.
     * </pre>
     *
     * <code>repeated .google.ads.googleads.v21.services.BatchJobResult results = 1;</code>
     */
    public java.util.List<com.google.ads.googleads.v21.services.BatchJobResult.Builder>
         getResultsBuilderList() {
      return getResultsFieldBuilder().getBuilderList();
    }
    // Lazily creates resultsBuilder_ and permanently switches this field from
    // list mode to builder mode; results_ is handed off and then nulled out.
    private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.ads.googleads.v21.services.BatchJobResult, com.google.ads.googleads.v21.services.BatchJobResult.Builder, com.google.ads.googleads.v21.services.BatchJobResultOrBuilder>
        getResultsFieldBuilder() {
      if (resultsBuilder_ == null) {
        resultsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.ads.googleads.v21.services.BatchJobResult, com.google.ads.googleads.v21.services.BatchJobResult.Builder, com.google.ads.googleads.v21.services.BatchJobResultOrBuilder>(
                results_,
                ((bitField0_ & 0x00000001) != 0),
                getParentForChildren(),
                isClean());
        // Ownership of the list has transferred to the field builder.
        results_ = null;
      }
      return resultsBuilder_;
    }
    // Holds either a String or a ByteString; converted lazily in each direction
    // and the decoded/encoded form is cached back into the field.
    private java.lang.Object nextPageToken_ = "";
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs =
            (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent reads skip the UTF-8 decode.
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString
        getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8(
                (java.lang.String) ref);
        // Cache the encoded ByteString for future serialization.
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(
        java.lang.String value) {
      if (value == null) { throw new NullPointerException(); }
      nextPageToken_ = value;
      // Bit 0x00000002 marks next_page_token as explicitly set in this builder.
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     * <pre>
     * Pagination token used to retrieve the next page of results.
     * Pass the content of this string as the `page_token` attribute of
     * the next request. `next_page_token` is not returned for the last
     * page.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(
        com.google.protobuf.ByteString value) {
      if (value == null) { throw new NullPointerException(); }
      // proto3 strings must be valid UTF-8; rejects malformed input up front.
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    // Unknown-field handling is inherited unchanged from GeneratedMessageV3.Builder;
    // these overrides exist only to make the methods final.
    @java.lang.Override
    public final Builder setUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
// @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.services.ListBatchJobResultsResponse)
}
// @@protoc_insertion_point(class_scope:google.ads.googleads.v21.services.ListBatchJobResultsResponse)
  // Immutable singleton representing a message with every field at its default;
  // shared by getDefaultInstance() and getDefaultInstanceForType().
  private static final com.google.ads.googleads.v21.services.ListBatchJobResultsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.ads.googleads.v21.services.ListBatchJobResultsResponse();
  }
  public static com.google.ads.googleads.v21.services.ListBatchJobResultsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Wire parser. On any failure it attaches the partially-parsed message to the
  // thrown InvalidProtocolBufferException so callers can inspect what was read.
  private static final com.google.protobuf.Parser<ListBatchJobResultsResponse>
      PARSER = new com.google.protobuf.AbstractParser<ListBatchJobResultsResponse>() {
    @java.lang.Override
    public ListBatchJobResultsResponse parsePartialFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws com.google.protobuf.InvalidProtocolBufferException {
      Builder builder = newBuilder();
      try {
        builder.mergeFrom(input, extensionRegistry);
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.setUnfinishedMessage(builder.buildPartial());
      } catch (com.google.protobuf.UninitializedMessageException e) {
        throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
      } catch (java.io.IOException e) {
        // Wrap plain I/O errors in the protobuf exception type expected by callers.
        throw new com.google.protobuf.InvalidProtocolBufferException(e)
            .setUnfinishedMessage(builder.buildPartial());
      }
      return builder.buildPartial();
    }
  };
  public static com.google.protobuf.Parser<ListBatchJobResultsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListBatchJobResultsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.ads.googleads.v21.services.ListBatchJobResultsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/schema/predict/params/video_classification.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1.schema.predict.params;
/**
*
*
* <pre>
* Prediction model parameters for Video Classification.
* </pre>
*
* Protobuf type {@code
* google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams}
*/
public final class VideoClassificationPredictionParams
extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams)
VideoClassificationPredictionParamsOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use VideoClassificationPredictionParams.newBuilder() to construct.
  private VideoClassificationPredictionParams(
      com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg constructor used only for the default instance and by newInstance().
  private VideoClassificationPredictionParams() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new VideoClassificationPredictionParams();
  }
  // Descriptor and reflection accessor table are owned by the generated
  // *Proto outer class; these methods only expose them.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.schema.predict.params
        .VideoClassificationPredictionParamsProto
        .internal_static_google_cloud_aiplatform_v1_schema_predict_params_VideoClassificationPredictionParams_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.schema.predict.params
        .VideoClassificationPredictionParamsProto
        .internal_static_google_cloud_aiplatform_v1_schema_predict_params_VideoClassificationPredictionParams_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams
                .class,
            com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams
                .Builder.class);
  }
  // Immutable message fields (proto3 scalar defaults) and their getters.
  public static final int CONFIDENCE_THRESHOLD_FIELD_NUMBER = 1;
  private float confidenceThreshold_ = 0F;
  /**
   *
   *
   * <pre>
   * The Model only returns predictions with at least this confidence score.
   * Default value is 0.0
   * </pre>
   *
   * <code>float confidence_threshold = 1;</code>
   *
   * @return The confidenceThreshold.
   */
  @java.lang.Override
  public float getConfidenceThreshold() {
    return confidenceThreshold_;
  }
  public static final int MAX_PREDICTIONS_FIELD_NUMBER = 2;
  private int maxPredictions_ = 0;
  /**
   *
   *
   * <pre>
   * The Model only returns up to that many top, by confidence score,
   * predictions per instance. If this number is very high, the Model may return
   * fewer predictions. Default value is 10,000.
   * </pre>
   *
   * <code>int32 max_predictions = 2;</code>
   *
   * @return The maxPredictions.
   */
  @java.lang.Override
  public int getMaxPredictions() {
    return maxPredictions_;
  }
  public static final int SEGMENT_CLASSIFICATION_FIELD_NUMBER = 3;
  private boolean segmentClassification_ = false;
  /**
   *
   *
   * <pre>
   * Set to true to request segment-level classification. Vertex AI returns
   * labels and their confidence scores for the entire time segment of the
   * video that user specified in the input instance.
   * Default value is true
   * </pre>
   *
   * <code>bool segment_classification = 3;</code>
   *
   * @return The segmentClassification.
   */
  @java.lang.Override
  public boolean getSegmentClassification() {
    return segmentClassification_;
  }
  public static final int SHOT_CLASSIFICATION_FIELD_NUMBER = 4;
  private boolean shotClassification_ = false;
  /**
   *
   *
   * <pre>
   * Set to true to request shot-level classification. Vertex AI determines
   * the boundaries for each camera shot in the entire time segment of the
   * video that user specified in the input instance. Vertex AI then
   * returns labels and their confidence scores for each detected shot, along
   * with the start and end time of the shot.
   * WARNING: Model evaluation is not done for this classification type,
   * the quality of it depends on the training data, but there are no metrics
   * provided to describe that quality.
   * Default value is false
   * </pre>
   *
   * <code>bool shot_classification = 4;</code>
   *
   * @return The shotClassification.
   */
  @java.lang.Override
  public boolean getShotClassification() {
    return shotClassification_;
  }
  public static final int ONE_SEC_INTERVAL_CLASSIFICATION_FIELD_NUMBER = 5;
  private boolean oneSecIntervalClassification_ = false;
  /**
   *
   *
   * <pre>
   * Set to true to request classification for a video at one-second intervals.
   * Vertex AI returns labels and their confidence scores for each second of
   * the entire time segment of the video that user specified in the input
   * WARNING: Model evaluation is not done for this classification type, the
   * quality of it depends on the training data, but there are no metrics
   * provided to describe that quality. Default value is false
   * </pre>
   *
   * <code>bool one_sec_interval_classification = 5;</code>
   *
   * @return The oneSecIntervalClassification.
   */
  @java.lang.Override
  public boolean getOneSecIntervalClassification() {
    return oneSecIntervalClassification_;
  }
  // Memoized tri-state: -1 = not computed, 0 = false, 1 = true. This message has
  // no required fields, so the computed result is always true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // proto3: default-valued fields are skipped on the wire; fields are written
    // in ascending field-number order. Raw-bit compare writes -0.0f too.
    if (java.lang.Float.floatToRawIntBits(confidenceThreshold_) != 0) {
      output.writeFloat(1, confidenceThreshold_);
    }
    if (maxPredictions_ != 0) {
      output.writeInt32(2, maxPredictions_);
    }
    if (segmentClassification_ != false) {
      output.writeBool(3, segmentClassification_);
    }
    if (shotClassification_ != false) {
      output.writeBool(4, shotClassification_);
    }
    if (oneSecIntervalClassification_ != false) {
      output.writeBool(5, oneSecIntervalClassification_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize uses -1 as the "not yet computed" sentinel (field inherited
    // from the protobuf base class); the message is immutable so caching is safe.
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    if (java.lang.Float.floatToRawIntBits(confidenceThreshold_) != 0) {
      size += com.google.protobuf.CodedOutputStream.computeFloatSize(1, confidenceThreshold_);
    }
    if (maxPredictions_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, maxPredictions_);
    }
    if (segmentClassification_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, segmentClassification_);
    }
    if (shotClassification_ != false) {
      size += com.google.protobuf.CodedOutputStream.computeBoolSize(4, shotClassification_);
    }
    if (oneSecIntervalClassification_ != false) {
      size +=
          com.google.protobuf.CodedOutputStream.computeBoolSize(5, oneSecIntervalClassification_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj
        instanceof
        com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams)) {
      return super.equals(obj);
    }
    com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams other =
        (com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams)
            obj;

    // Floats are compared via their int bit patterns (so NaN == NaN here,
    // matching protobuf equality semantics rather than Java == on floats).
    if (java.lang.Float.floatToIntBits(getConfidenceThreshold())
        != java.lang.Float.floatToIntBits(other.getConfidenceThreshold())) return false;
    if (getMaxPredictions() != other.getMaxPredictions()) return false;
    if (getSegmentClassification() != other.getSegmentClassification()) return false;
    if (getShotClassification() != other.getShotClassification()) return false;
    if (getOneSecIntervalClassification() != other.getOneSecIntervalClassification()) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel; the message is immutable,
    // so memoizing the hash is safe. Hash covers the same fields as equals().
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + CONFIDENCE_THRESHOLD_FIELD_NUMBER;
    hash = (53 * hash) + java.lang.Float.floatToIntBits(getConfidenceThreshold());
    hash = (37 * hash) + MAX_PREDICTIONS_FIELD_NUMBER;
    hash = (53 * hash) + getMaxPredictions();
    hash = (37 * hash) + SEGMENT_CLASSIFICATION_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getSegmentClassification());
    hash = (37 * hash) + SHOT_CLASSIFICATION_FIELD_NUMBER;
    hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getShotClassification());
    hash = (37 * hash) + ONE_SEC_INTERVAL_CLASSIFICATION_FIELD_NUMBER;
    hash =
        (53 * hash) + com.google.protobuf.Internal.hashBoolean(getOneSecIntervalClassification());
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parse entry points: one overload per input source
  // (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
  // and without an ExtensionRegistryLite. All delegate to PARSER.
  public static com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams
      parseFrom(java.nio.ByteBuffer data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams
      parseFrom(
          java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams
      parseFrom(com.google.protobuf.ByteString data)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams
      parseFrom(
          com.google.protobuf.ByteString data,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams
      parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams
      parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  // Stream variants surface I/O failures as IOException instead of
  // InvalidProtocolBufferException.
  public static com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams
      parseFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams
      parseFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams
      parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams
      parseDelimitedFrom(
          java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams
      parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams
      parseFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  public static Builder newBuilder(
      com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams
          prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // Avoids a needless mergeFrom when converting the default instance itself.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Prediction model parameters for Video Classification.
* </pre>
*
* Protobuf type {@code
* google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams)
com.google.cloud.aiplatform.v1.schema.predict.params
.VideoClassificationPredictionParamsOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParamsProto
          .internal_static_google_cloud_aiplatform_v1_schema_predict_params_VideoClassificationPredictionParams_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParamsProto
          .internal_static_google_cloud_aiplatform_v1_schema_predict_params_VideoClassificationPredictionParams_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.aiplatform.v1.schema.predict.params
                  .VideoClassificationPredictionParams.class,
              com.google.cloud.aiplatform.v1.schema.predict.params
                  .VideoClassificationPredictionParams.Builder.class);
    }

    // Construct using
    // com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    @java.lang.Override
    public Builder clear() {
      super.clear();
      // Reset every field to its proto3 default and drop all has-bits.
      bitField0_ = 0;
      confidenceThreshold_ = 0F;
      maxPredictions_ = 0;
      segmentClassification_ = false;
      shotClassification_ = false;
      oneSecIntervalClassification_ = false;
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParamsProto
          .internal_static_google_cloud_aiplatform_v1_schema_predict_params_VideoClassificationPredictionParams_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams
        getDefaultInstanceForType() {
      return com.google.cloud.aiplatform.v1.schema.predict.params
          .VideoClassificationPredictionParams.getDefaultInstance();
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams
        build() {
      com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams
          result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams
        buildPartial() {
      com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams
          result =
              new com.google.cloud.aiplatform.v1.schema.predict.params
                  .VideoClassificationPredictionParams(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Copies only explicitly-set fields into the new message; each bit in
    // bitField0_ corresponds to one field in declaration order (0x01..0x10).
    private void buildPartial0(
        com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams
            result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.confidenceThreshold_ = confidenceThreshold_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.maxPredictions_ = maxPredictions_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.segmentClassification_ = segmentClassification_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.shotClassification_ = shotClassification_;
      }
      if (((from_bitField0_ & 0x00000010) != 0)) {
        result.oneSecIntervalClassification_ = oneSecIntervalClassification_;
      }
    }
    // Pure pass-throughs to GeneratedMessageV3.Builder; regenerated boilerplate
    // kept so the covariant Builder return type is preserved for chaining.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      // Fast path for the concrete type; otherwise fall back to reflective merge.
      if (other
          instanceof
          com.google.cloud.aiplatform.v1.schema.predict.params
              .VideoClassificationPredictionParams) {
        return mergeFrom(
            (com.google.cloud.aiplatform.v1.schema.predict.params
                    .VideoClassificationPredictionParams)
                other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    public Builder mergeFrom(
        com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams
            other) {
      // Merging the default instance is a no-op; only non-default (i.e. set)
      // fields of `other` overwrite this builder's values.
      if (other
          == com.google.cloud.aiplatform.v1.schema.predict.params
              .VideoClassificationPredictionParams.getDefaultInstance()) return this;
      if (other.getConfidenceThreshold() != 0F) {
        setConfidenceThreshold(other.getConfidenceThreshold());
      }
      if (other.getMaxPredictions() != 0) {
        setMaxPredictions(other.getMaxPredictions());
      }
      if (other.getSegmentClassification() != false) {
        setSegmentClassification(other.getSegmentClassification());
      }
      if (other.getShotClassification() != false) {
        setShotClassification(other.getShotClassification());
      }
      if (other.getOneSecIntervalClassification() != false) {
        setOneSecIntervalClassification(other.getOneSecIntervalClassification());
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      // No required fields in this proto3 message.
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          // Wire tag layout: (field_number << 3) | wire_type.
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            // 13 = field 1, wire type 5 (fixed32 float).
            case 13:
              {
                confidenceThreshold_ = input.readFloat();
                bitField0_ |= 0x00000001;
                break;
              } // case 13
            // 16 = field 2, wire type 0 (varint).
            case 16:
              {
                maxPredictions_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            // 24 = field 3, wire type 0 (varint bool).
            case 24:
              {
                segmentClassification_ = input.readBool();
                bitField0_ |= 0x00000004;
                break;
              } // case 24
            // 32 = field 4, wire type 0 (varint bool).
            case 32:
              {
                shotClassification_ = input.readBool();
                bitField0_ |= 0x00000008;
                break;
              } // case 32
            // 40 = field 5, wire type 0 (varint bool).
            case 40:
              {
                oneSecIntervalClassification_ = input.readBool();
                bitField0_ |= 0x00000010;
                break;
              } // case 40
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Per-field "explicitly set" bits consumed by buildPartial0().
    private int bitField0_;

    private float confidenceThreshold_;
    /**
     *
     *
     * <pre>
     * The Model only returns predictions with at least this confidence score.
     * Default value is 0.0
     * </pre>
     *
     * <code>float confidence_threshold = 1;</code>
     *
     * @return The confidenceThreshold.
     */
    @java.lang.Override
    public float getConfidenceThreshold() {
      return confidenceThreshold_;
    }
    /**
     *
     *
     * <pre>
     * The Model only returns predictions with at least this confidence score.
     * Default value is 0.0
     * </pre>
     *
     * <code>float confidence_threshold = 1;</code>
     *
     * @param value The confidenceThreshold to set.
     * @return This builder for chaining.
     */
    public Builder setConfidenceThreshold(float value) {

      confidenceThreshold_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Model only returns predictions with at least this confidence score.
     * Default value is 0.0
     * </pre>
     *
     * <code>float confidence_threshold = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearConfidenceThreshold() {
      bitField0_ = (bitField0_ & ~0x00000001);
      confidenceThreshold_ = 0F;
      onChanged();
      return this;
    }
    private int maxPredictions_;
    /**
     *
     *
     * <pre>
     * The Model only returns up to that many top, by confidence score,
     * predictions per instance. If this number is very high, the Model may return
     * fewer predictions. Default value is 10,000.
     * </pre>
     *
     * <code>int32 max_predictions = 2;</code>
     *
     * @return The maxPredictions.
     */
    @java.lang.Override
    public int getMaxPredictions() {
      return maxPredictions_;
    }
    /**
     *
     *
     * <pre>
     * The Model only returns up to that many top, by confidence score,
     * predictions per instance. If this number is very high, the Model may return
     * fewer predictions. Default value is 10,000.
     * </pre>
     *
     * <code>int32 max_predictions = 2;</code>
     *
     * @param value The maxPredictions to set.
     * @return This builder for chaining.
     */
    public Builder setMaxPredictions(int value) {
      // Bit 0x00000002 marks max_predictions as explicitly set.
      maxPredictions_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * The Model only returns up to that many top, by confidence score,
     * predictions per instance. If this number is very high, the Model may return
     * fewer predictions. Default value is 10,000.
     * </pre>
     *
     * <code>int32 max_predictions = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearMaxPredictions() {
      bitField0_ = (bitField0_ & ~0x00000002);
      maxPredictions_ = 0;
      onChanged();
      return this;
    }
private boolean segmentClassification_;
/**
*
*
* <pre>
* Set to true to request segment-level classification. Vertex AI returns
* labels and their confidence scores for the entire time segment of the
* video that user specified in the input instance.
* Default value is true
* </pre>
*
* <code>bool segment_classification = 3;</code>
*
* @return The segmentClassification.
*/
@java.lang.Override
public boolean getSegmentClassification() {
return segmentClassification_;
}
/**
*
*
* <pre>
* Set to true to request segment-level classification. Vertex AI returns
* labels and their confidence scores for the entire time segment of the
* video that user specified in the input instance.
* Default value is true
* </pre>
*
* <code>bool segment_classification = 3;</code>
*
* @param value The segmentClassification to set.
* @return This builder for chaining.
*/
public Builder setSegmentClassification(boolean value) {
segmentClassification_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Set to true to request segment-level classification. Vertex AI returns
* labels and their confidence scores for the entire time segment of the
* video that user specified in the input instance.
* Default value is true
* </pre>
*
* <code>bool segment_classification = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearSegmentClassification() {
  // Restore the default value, then drop the corresponding has-bit.
  segmentClassification_ = false;
  bitField0_ = (bitField0_ & ~0x00000004);
  onChanged();
  return this;
}
// Backing store for the shot_classification field (proto field number 4).
private boolean shotClassification_;
/**
*
*
* <pre>
* Set to true to request shot-level classification. Vertex AI determines
* the boundaries for each camera shot in the entire time segment of the
* video that user specified in the input instance. Vertex AI then
* returns labels and their confidence scores for each detected shot, along
* with the start and end time of the shot.
* WARNING: Model evaluation is not done for this classification type,
* the quality of it depends on the training data, but there are no metrics
* provided to describe that quality.
* Default value is false
* </pre>
*
* <code>bool shot_classification = 4;</code>
*
* @return The shotClassification.
*/
@java.lang.Override
public boolean getShotClassification() {
// Plain field read; the has-bit in bitField0_ is not consulted here.
return shotClassification_;
}
/**
*
*
* <pre>
* Set to true to request shot-level classification. Vertex AI determines
* the boundaries for each camera shot in the entire time segment of the
* video that user specified in the input instance. Vertex AI then
* returns labels and their confidence scores for each detected shot, along
* with the start and end time of the shot.
* WARNING: Model evaluation is not done for this classification type,
* the quality of it depends on the training data, but there are no metrics
* provided to describe that quality.
* Default value is false
* </pre>
*
* <code>bool shot_classification = 4;</code>
*
* @param value The shotClassification to set.
* @return This builder for chaining.
*/
public Builder setShotClassification(boolean value) {
  // Flag shot_classification as explicitly set, then store the value.
  bitField0_ |= 0x00000008;
  shotClassification_ = value;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Set to true to request shot-level classification. Vertex AI determines
* the boundaries for each camera shot in the entire time segment of the
* video that user specified in the input instance. Vertex AI then
* returns labels and their confidence scores for each detected shot, along
* with the start and end time of the shot.
* WARNING: Model evaluation is not done for this classification type,
* the quality of it depends on the training data, but there are no metrics
* provided to describe that quality.
* Default value is false
* </pre>
*
* <code>bool shot_classification = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearShotClassification() {
  // Restore the default value, then drop the corresponding has-bit.
  shotClassification_ = false;
  bitField0_ = (bitField0_ & ~0x00000008);
  onChanged();
  return this;
}
// Backing store for the one_sec_interval_classification field (proto field number 5).
private boolean oneSecIntervalClassification_;
/**
*
*
* <pre>
* Set to true to request classification for a video at one-second intervals.
* Vertex AI returns labels and their confidence scores for each second of
* the entire time segment of the video that user specified in the input
* WARNING: Model evaluation is not done for this classification type, the
* quality of it depends on the training data, but there are no metrics
* provided to describe that quality. Default value is false
* </pre>
*
* <code>bool one_sec_interval_classification = 5;</code>
*
* @return The oneSecIntervalClassification.
*/
@java.lang.Override
public boolean getOneSecIntervalClassification() {
// Plain field read; the has-bit in bitField0_ is not consulted here.
return oneSecIntervalClassification_;
}
/**
*
*
* <pre>
* Set to true to request classification for a video at one-second intervals.
* Vertex AI returns labels and their confidence scores for each second of
* the entire time segment of the video that user specified in the input
* WARNING: Model evaluation is not done for this classification type, the
* quality of it depends on the training data, but there are no metrics
* provided to describe that quality. Default value is false
* </pre>
*
* <code>bool one_sec_interval_classification = 5;</code>
*
* @param value The oneSecIntervalClassification to set.
* @return This builder for chaining.
*/
public Builder setOneSecIntervalClassification(boolean value) {
  // Flag one_sec_interval_classification as set, then store the value.
  bitField0_ |= 0x00000010;
  oneSecIntervalClassification_ = value;
  onChanged();
  return this;
}
/**
*
*
* <pre>
* Set to true to request classification for a video at one-second intervals.
* Vertex AI returns labels and their confidence scores for each second of
* the entire time segment of the video that user specified in the input
* WARNING: Model evaluation is not done for this classification type, the
* quality of it depends on the training data, but there are no metrics
* provided to describe that quality. Default value is false
* </pre>
*
* <code>bool one_sec_interval_classification = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearOneSecIntervalClassification() {
  // Restore the default value, then drop the corresponding has-bit.
  oneSecIntervalClassification_ = false;
  bitField0_ = (bitField0_ & ~0x00000010);
  onChanged();
  return this;
}
@java.lang.Override
// Replaces the unknown-field set by delegating to the generated base builder.
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
// Merges (rather than replaces) unknown fields via the generated base builder.
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams)
// Shared default instance, created eagerly when the class is loaded.
private static final com.google.cloud.aiplatform.v1.schema.predict.params
.VideoClassificationPredictionParams
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE =
new com.google.cloud.aiplatform.v1.schema.predict.params
.VideoClassificationPredictionParams();
}
// Returns the shared instance with every field at its default value.
public static com.google.cloud.aiplatform.v1.schema.predict.params
.VideoClassificationPredictionParams
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Wire-format parser; on failure it attaches whatever was parsed so far
// as the "unfinished message" of the thrown exception.
private static final com.google.protobuf.Parser<VideoClassificationPredictionParams> PARSER =
new com.google.protobuf.AbstractParser<VideoClassificationPredictionParams>() {
@java.lang.Override
public VideoClassificationPredictionParams parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
// Re-throw with the partially built message attached.
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Wrap plain I/O failures in the protobuf-specific exception type.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
// Static accessor for the shared parser above.
public static com.google.protobuf.Parser<VideoClassificationPredictionParams> parser() {
return PARSER;
}
@java.lang.Override
// Instance-level accessor for the shared static parser.
public com.google.protobuf.Parser<VideoClassificationPredictionParams> getParserForType() {
return PARSER;
}
@java.lang.Override
// Instance-level accessor for the shared default instance.
public com.google.cloud.aiplatform.v1.schema.predict.params.VideoClassificationPredictionParams
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
/*
* Copyright (c) 2011, 2012, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
/*
* @test
* @bug 7073631 7159445 7156633
* @summary tests error and diagnostics positions
* @author Jan Lahoda
*/
import com.sun.source.tree.BinaryTree;
import com.sun.source.tree.BlockTree;
import com.sun.source.tree.ClassTree;
import com.sun.source.tree.CompilationUnitTree;
import com.sun.source.tree.ErroneousTree;
import com.sun.source.tree.ExpressionStatementTree;
import com.sun.source.tree.ExpressionTree;
import com.sun.source.tree.MethodInvocationTree;
import com.sun.source.tree.MethodTree;
import com.sun.source.tree.ModifiersTree;
import com.sun.source.tree.StatementTree;
import com.sun.source.tree.Tree;
import com.sun.source.tree.Tree.Kind;
import com.sun.source.tree.VariableTree;
import com.sun.source.tree.WhileLoopTree;
import com.sun.source.util.SourcePositions;
import com.sun.source.util.TreeScanner;
import com.sun.source.util.Trees;
import com.sun.tools.javac.api.JavacTaskImpl;
import com.sun.tools.javac.tree.JCTree;
import java.io.IOException;
import java.lang.annotation.ElementType;
import java.lang.annotation.Retention;
import java.lang.annotation.RetentionPolicy;
import java.lang.annotation.Target;
import java.lang.reflect.Method;
import java.net.URI;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.LinkedList;
import java.util.List;
import java.util.regex.Pattern;
import javax.tools.Diagnostic;
import javax.tools.DiagnosticCollector;
import javax.tools.DiagnosticListener;
import javax.tools.JavaCompiler;
import javax.tools.JavaFileObject;
import javax.tools.SimpleJavaFileObject;
import javax.tools.ToolProvider;
public class JavacParserTest extends TestCase {
// Shared system Java compiler used by every test in this class.
static final JavaCompiler tool = ToolProvider.getSystemJavaCompiler();
// Prevent outside instantiation; the tests are driven through main().
private JavacParserTest(){}
// Entry point: delegates to run(args) on a fresh instance.
public static void main(String... args) throws Exception {
new JavacParserTest().run(args);
}
/**
 * In-memory Java source file used to feed test snippets to the compiler
 * without touching the file system.
 */
class MyFileObject extends SimpleJavaFileObject {

    /** Source text returned by {@link #getCharContent}; immutable. */
    private final String text;

    public MyFileObject(String text) {
        // All snippets compile under the same synthetic name "Test.java".
        super(URI.create("myfo:/Test.java"), JavaFileObject.Kind.SOURCE);
        this.text = text;
    }

    @Override
    public CharSequence getCharContent(boolean ignoreEncodingErrors) {
        return text;
    }
}
/**
 * Converts platform-specific line separators to Unix-style "\n" so that
 * golden-string comparisons are stable across operating systems.
 *
 * @param in string possibly containing platform line separators
 * @return {@code in} with every platform line separator replaced by "\n"
 */
String normalize(String in) {
    // System.lineSeparator() (Java 7+) reads the cached separator instead
    // of re-querying the mutable system property on every call.
    return in.replace(System.lineSeparator(), "\n");
}
/**
 * Parses {@code code} with the shared compiler and returns the single
 * resulting compilation unit.
 */
CompilationUnitTree getCompilationUnitTree(String code) throws IOException {
    JavacTaskImpl task = (JavacTaskImpl) tool.getTask(null, null, null, null,
            null, Arrays.asList(new MyFileObject(code)));
    return task.parse().iterator().next();
}
/**
 * Returns the printed form of every subtree attached to an erroneous node.
 * Fails fast if the parser attached no error trees at all.
 */
List<String> getErroneousTreeValues(ErroneousTree node) {
    if (node.getErrorTrees() == null) {
        throw new RuntimeException("ERROR: No Erroneous tree "
                + "has been created.");
    }
    List<String> values = new ArrayList<>();
    for (Tree t : node.getErrorTrees()) {
        values.add(t.toString());
    }
    return values;
}
// Checks the start/end positions reported for "super();": the whole
// statement, the invocation expression, and its method-select part.
@Test
void testPositionForSuperConstructorCalls() throws IOException {
assert tool != null;
String code = "package test; public class Test {public Test() {super();}}";
JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null, null, null,
null, Arrays.asList(new MyFileObject(code)));
CompilationUnitTree cut = ct.parse().iterator().next();
SourcePositions pos = Trees.instance(ct).getSourcePositions();
MethodTree method =
(MethodTree) ((ClassTree) cut.getTypeDecls().get(0)).getMembers().get(0);
ExpressionStatementTree es =
(ExpressionStatementTree) method.getBody().getStatements().get(0);
// Expected positions are derived from where the pretty-printed tree
// text occurs in the original source string.
final int esStartPos = code.indexOf(es.toString());
final int esEndPos = esStartPos + es.toString().length();
assertEquals("testPositionForSuperConstructorCalls",
esStartPos, pos.getStartPosition(cut, es));
assertEquals("testPositionForSuperConstructorCalls",
esEndPos, pos.getEndPosition(cut, es));
MethodInvocationTree mit = (MethodInvocationTree) es.getExpression();
final int mitStartPos = code.indexOf(mit.toString());
final int mitEndPos = mitStartPos + mit.toString().length();
assertEquals("testPositionForSuperConstructorCalls",
mitStartPos, pos.getStartPosition(cut, mit));
assertEquals("testPositionForSuperConstructorCalls",
mitEndPos, pos.getEndPosition(cut, mit));
final int methodStartPos = mitStartPos;
final int methodEndPos = methodStartPos + mit.getMethodSelect().toString().length();
assertEquals("testPositionForSuperConstructorCalls",
methodStartPos, pos.getStartPosition(cut, mit.getMethodSelect()));
assertEquals("testPositionForSuperConstructorCalls",
methodEndPos, pos.getEndPosition(cut, mit.getMethodSelect()));
}
// Checks that an enum's modifiers tree spans exactly the "public" keyword.
@Test
void testPositionForEnumModifiers() throws IOException {
final String theString = "public";
String code = "package test; " + theString + " enum Test {A;}";
JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null, null, null,
null, Arrays.asList(new MyFileObject(code)));
CompilationUnitTree cut = ct.parse().iterator().next();
SourcePositions pos = Trees.instance(ct).getSourcePositions();
ClassTree clazz = (ClassTree) cut.getTypeDecls().get(0);
ModifiersTree mt = clazz.getModifiers();
int spos = code.indexOf(theString);
int epos = spos + theString.length();
assertEquals("testPositionForEnumModifiers",
spos, pos.getStartPosition(cut, mt));
assertEquals("testPositionForEnumModifiers",
epos, pos.getEndPosition(cut, mt));
}
// Checks positions of a qualified inner-class creation "Test.this.new d()".
@Test
void testNewClassWithEnclosing() throws IOException {
final String theString = "Test.this.new d()";
String code = "package test; class Test { " +
"class d {} private void method() { " +
"Object o = " + theString + "; } }";
JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null, null, null,
null, Arrays.asList(new MyFileObject(code)));
CompilationUnitTree cut = ct.parse().iterator().next();
SourcePositions pos = Trees.instance(ct).getSourcePositions();
ClassTree clazz = (ClassTree) cut.getTypeDecls().get(0);
// Member 1 is method(); its first statement's initializer is the new-class expression.
ExpressionTree est =
((VariableTree) ((MethodTree) clazz.getMembers().get(1)).getBody().getStatements().get(0)).getInitializer();
final int spos = code.indexOf(theString);
final int epos = spos + theString.length();
assertEquals("testNewClassWithEnclosing",
spos, pos.getStartPosition(cut, est));
assertEquals("testNewClassWithEnclosing",
epos, pos.getEndPosition(cut, est));
}
// Checks that a binary tree's preferred position (JCTree.pos) is the
// position of its operator, here the "&&".
@Test
void testPreferredPositionForBinaryOp() throws IOException {
String code = "package test; public class Test {"
+ "private void test() {"
+ "Object o = null; boolean b = o != null && o instanceof String;"
+ "} private Test() {}}";
CompilationUnitTree cut = getCompilationUnitTree(code);
ClassTree clazz = (ClassTree) cut.getTypeDecls().get(0);
MethodTree method = (MethodTree) clazz.getMembers().get(0);
VariableTree condSt = (VariableTree) method.getBody().getStatements().get(1);
BinaryTree cond = (BinaryTree) condSt.getInitializer();
JCTree condJC = (JCTree) cond;
int condStartPos = code.indexOf("&&");
assertEquals("testPreferredPositionForBinaryOp",
condStartPos, condJC.pos);
}
// A broken enhanced-for ("for (a : s)") must still parse to an
// ENHANCED_FOR_LOOP node, and at least one diagnostic must be reported.
@Test
void testErrorRecoveryForEnhancedForLoop142381() throws IOException {
String code = "package test; class Test { " +
"private void method() { " +
"java.util.Set<String> s = null; for (a : s) {} } }";
final List<Diagnostic<? extends JavaFileObject>> errors =
new LinkedList<Diagnostic<? extends JavaFileObject>>();
JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null,
new DiagnosticListener<JavaFileObject>() {
public void report(Diagnostic<? extends JavaFileObject> diagnostic) {
errors.add(diagnostic);
}
}, null, null, Arrays.asList(new MyFileObject(code)));
CompilationUnitTree cut = ct.parse().iterator().next();
ClassTree clazz = (ClassTree) cut.getTypeDecls().get(0);
StatementTree forStatement =
((MethodTree) clazz.getMembers().get(0)).getBody().getStatements().get(1);
assertEquals("testErrorRecoveryForEnhancedForLoop142381",
Kind.ENHANCED_FOR_LOOP, forStatement.getKind());
assertFalse("testErrorRecoveryForEnhancedForLoop142381", errors.isEmpty());
}
// An annotation type with no package statement must start at position 1
// (just after the leading newline in the snippet).
@Test
void testPositionAnnotationNoPackage187551() throws IOException {
String code = "\n@interface Test {}";
JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null, null, null,
null, Arrays.asList(new MyFileObject(code)));
CompilationUnitTree cut = ct.parse().iterator().next();
ClassTree clazz = (ClassTree) cut.getTypeDecls().get(0);
Trees t = Trees.instance(ct);
assertEquals("testPositionAnnotationNoPackage187551",
1, t.getSourcePositions().getStartPosition(cut, clazz));
}
// The next three tests run the nesting-sanity check over sources with
// nested "? extends" / "? super" wildcards.
@Test
void testPositionsSane1() throws IOException {
performPositionsSanityTest("package test; class Test { " +
"private void method() { " +
"java.util.List<? extends java.util.List<? extends String>> l; " +
"} }");
}
@Test
void testPositionsSane2() throws IOException {
performPositionsSanityTest("package test; class Test { " +
"private void method() { " +
"java.util.List<? super java.util.List<? super String>> l; " +
"} }");
}
@Test
void testPositionsSane3() throws IOException {
performPositionsSanityTest("package test; class Test { " +
"private void method() { " +
"java.util.List<? super java.util.List<?>> l; } }");
}
// Parses the given source and walks the whole tree, asserting that each
// node's [start, end] range lies within its parent's range. Diagnostics
// are collected but not asserted on here.
private void performPositionsSanityTest(String code) throws IOException {
final List<Diagnostic<? extends JavaFileObject>> errors =
new LinkedList<Diagnostic<? extends JavaFileObject>>();
JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null,
new DiagnosticListener<JavaFileObject>() {
public void report(Diagnostic<? extends JavaFileObject> diagnostic) {
errors.add(diagnostic);
}
}, null, null, Arrays.asList(new MyFileObject(code)));
final CompilationUnitTree cut = ct.parse().iterator().next();
final Trees trees = Trees.instance(ct);
new TreeScanner<Void, Void>() {
// Range of the nearest enclosing node with a real source position.
private long parentStart = 0;
private long parentEnd = Integer.MAX_VALUE;
@Override
public Void scan(Tree node, Void p) {
if (node == null) {
return null;
}
long start = trees.getSourcePositions().getStartPosition(cut, node);
if (start == (-1)) {
return null; // synthetic tree
}
assertTrue(node.toString() + ":" + start + "/" + parentStart,
parentStart <= start);
// Save the parent's range, narrow it to this node, recurse, restore.
long prevParentStart = parentStart;
parentStart = start;
long end = trees.getSourcePositions().getEndPosition(cut, node);
assertTrue(node.toString() + ":" + end + "/" + parentEnd,
end <= parentEnd);
long prevParentEnd = parentEnd;
parentEnd = end;
super.scan(node, p);
parentStart = prevParentStart;
parentEnd = prevParentEnd;
return null;
}
private void assertTrue(String message, boolean b) {
if (!b) fail(message);
}
}.scan(cut, null);
}
// The next five tests check that every node in a wildcard-heavy variable
// declaration covers exactly the expected source substring (golden list
// is in tree-visitation order).
@Test
void testCorrectWilcardPositions1() throws IOException {
performWildcardPositionsTest("package test; import java.util.List; " +
"class Test { private void method() { List<? extends List<? extends String>> l; } }",
Arrays.asList("List<? extends List<? extends String>> l;",
"List<? extends List<? extends String>>",
"List",
"? extends List<? extends String>",
"List<? extends String>",
"List",
"? extends String",
"String"));
}
@Test
void testCorrectWilcardPositions2() throws IOException {
performWildcardPositionsTest("package test; import java.util.List; "
+ "class Test { private void method() { List<? super List<? super String>> l; } }",
Arrays.asList("List<? super List<? super String>> l;",
"List<? super List<? super String>>",
"List",
"? super List<? super String>",
"List<? super String>",
"List",
"? super String",
"String"));
}
@Test
void testCorrectWilcardPositions3() throws IOException {
performWildcardPositionsTest("package test; import java.util.List; " +
"class Test { private void method() { List<? super List<?>> l; } }",
Arrays.asList("List<? super List<?>> l;",
"List<? super List<?>>",
"List",
"? super List<?>",
"List<?>",
"List",
"?"));
}
@Test
void testCorrectWilcardPositions4() throws IOException {
performWildcardPositionsTest("package test; import java.util.List; " +
"class Test { private void method() { " +
"List<? extends List<? extends List<? extends String>>> l; } }",
Arrays.asList("List<? extends List<? extends List<? extends String>>> l;",
"List<? extends List<? extends List<? extends String>>>",
"List",
"? extends List<? extends List<? extends String>>",
"List<? extends List<? extends String>>",
"List",
"? extends List<? extends String>",
"List<? extends String>",
"List",
"? extends String",
"String"));
}
@Test
void testCorrectWilcardPositions5() throws IOException {
// Same as test 4, but with an extra space before the closing ">>>".
performWildcardPositionsTest("package test; import java.util.List; " +
"class Test { private void method() { " +
"List<? extends List<? extends List<? extends String >>> l; } }",
Arrays.asList("List<? extends List<? extends List<? extends String >>> l;",
"List<? extends List<? extends List<? extends String >>>",
"List",
"? extends List<? extends List<? extends String >>",
"List<? extends List<? extends String >>",
"List",
"? extends List<? extends String >",
"List<? extends String >",
"List",
"? extends String",
"String"));
}
// Parses the source, scans the first statement of the first method, and
// records for each positioned node the source substring its range covers;
// the recorded list must equal the golden list.
void performWildcardPositionsTest(final String code,
List<String> golden) throws IOException {
final List<Diagnostic<? extends JavaFileObject>> errors =
new LinkedList<Diagnostic<? extends JavaFileObject>>();
JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null,
new DiagnosticListener<JavaFileObject>() {
public void report(Diagnostic<? extends JavaFileObject> diagnostic) {
errors.add(diagnostic);
}
}, null, null, Arrays.asList(new MyFileObject(code)));
final CompilationUnitTree cut = ct.parse().iterator().next();
final List<String> content = new LinkedList<String>();
final Trees trees = Trees.instance(ct);
new TreeScanner<Void, Void>() {
@Override
public Void scan(Tree node, Void p) {
if (node == null) {
return null;
}
long start = trees.getSourcePositions().getStartPosition(cut, node);
if (start == (-1)) {
return null; // synthetic tree
}
long end = trees.getSourcePositions().getEndPosition(cut, node);
String s = code.substring((int) start, (int) end);
content.add(s);
return super.scan(node, p);
}
}.scan(((MethodTree) ((ClassTree) cut.getTypeDecls().get(0)).getMembers().get(0)).getBody().getStatements().get(0), null);
assertEquals("performWildcardPositionsTest",golden.toString(),
content.toString());
}
// A method with type parameters but no modifiers must span exactly
// "<T> void t() {}" in the source.
@Test
void testStartPositionForMethodWithoutModifiers() throws IOException {
String code = "package t; class Test { <T> void t() {} }";
JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null, null, null,
null, Arrays.asList(new MyFileObject(code)));
CompilationUnitTree cut = ct.parse().iterator().next();
ClassTree clazz = (ClassTree) cut.getTypeDecls().get(0);
MethodTree mt = (MethodTree) clazz.getMembers().get(0);
Trees t = Trees.instance(ct);
int start = (int) t.getSourcePositions().getStartPosition(cut, mt);
int end = (int) t.getSourcePositions().getEndPosition(cut, mt);
assertEquals("testStartPositionForMethodWithoutModifiers",
"<T> void t() {}", code.substring(start, end));
}
/**
 * Parses {@code code} and asserts that the parser emits exactly the
 * diagnostic codes in {@code expectedCodes}, in order.
 *
 * @param testName name of the calling test, used in the failure message
 * @param code source text to parse
 * @param expectedCodes expected diagnostic keys
 *        (e.g. "compiler.err.class.not.allowed")
 */
private void assertParseErrorCodes(String testName, String code,
        List<String> expectedCodes) throws IOException {
    DiagnosticCollector<JavaFileObject> coll =
            new DiagnosticCollector<JavaFileObject>();
    JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null, coll, null,
            null, Arrays.asList(new MyFileObject(code)));
    ct.parse();
    List<String> codes = new LinkedList<String>();
    for (Diagnostic<? extends JavaFileObject> d : coll.getDiagnostics()) {
        codes.add(d.getCode());
    }
    assertEquals(testName, expectedCodes, codes);
}

// A variable declaration may not be the lone statement of an if-then.
@Test
void testVariableInIfThen1() throws IOException {
    assertParseErrorCodes("testVariableInIfThen1",
            "package t; class Test { "
            + "private static void t(String name) { "
            + "if (name != null) String nn = name.trim(); } }",
            Arrays.<String>asList("compiler.err.variable.not.allowed"));
}

// A local class declaration may not be the lone statement of an if-then.
@Test
void testVariableInIfThen2() throws IOException {
    assertParseErrorCodes("testVariableInIfThen2",
            "package t; class Test { "
            + "private static void t(String name) { "
            + "if (name != null) class X {} } }",
            Arrays.<String>asList("compiler.err.class.not.allowed"));
}

// Same as above, with a modifier on the local class.
@Test
void testVariableInIfThen3() throws IOException {
    assertParseErrorCodes("testVariableInIfThen3",
            "package t; class Test { "
            + "private static void t() { "
            + "if (true) abstract class F {} }}",
            Arrays.<String>asList("compiler.err.class.not.allowed"));
}

// Same as above, with an interface instead of a class.
@Test
void testVariableInIfThen4() throws IOException {
    assertParseErrorCodes("testVariableInIfThen4",
            "package t; class Test { "
            + "private static void t(String name) { "
            + "if (name != null) interface X {} } }",
            Arrays.<String>asList("compiler.err.class.not.allowed"));
}

// A missing then-statement is an illegal start of statement.
@Test
void testVariableInIfThen5() throws IOException {
    assertParseErrorCodes("testVariableInIfThen5",
            "package t; class Test { "
            + "private static void t() { "
            + "if (true) } }",
            Arrays.<String>asList("compiler.err.illegal.start.of.stmt"));
}
// see javac bug #6882235, NB bug #98234:
// The parser must survive a float literal with a missing exponent ("0e")
// and still produce a compilation unit.
@Test
void testMissingExponent() throws IOException {
String code = "\nclass Test { { System.err.println(0e); } }";
JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null, null, null,
null, Arrays.asList(new MyFileObject(code)));
assertNotNull(ct.parse().iterator().next());
}
// Checks the preferred position (JCTree.pos) of a try-with-resources
// resource variable: it must point at the variable name "in".
@Test
void testTryResourcePos() throws IOException {
final String code = "package t; class Test { " +
"{ try (java.io.InputStream in = null) { } } }";
CompilationUnitTree cut = getCompilationUnitTree(code);
new TreeScanner<Void, Void>() {
@Override
public Void visitVariable(VariableTree node, Void p) {
if ("in".contentEquals(node.getName())) {
JCTree.JCVariableDecl var = (JCTree.JCVariableDecl) node;
assertEquals("testTryResourcePos", "in = null) { } } }",
code.substring(var.pos));
}
return super.visitVariable(node, p);
}
}.scan(cut, null);
}
// Checks the preferred position (JCTree.pos) of an ordinary local
// variable declaration: it must point at the variable name "in".
@Test
void testVarPos() throws IOException {
final String code = "package t; class Test { " +
"{ java.io.InputStream in = null; } }";
CompilationUnitTree cut = getCompilationUnitTree(code);
new TreeScanner<Void, Void>() {
@Override
public Void visitVariable(VariableTree node, Void p) {
if ("in".contentEquals(node.getName())) {
JCTree.JCVariableDecl var = (JCTree.JCVariableDecl) node;
assertEquals("testVarPos","in = null; } }",
code.substring(var.pos));
}
return super.visitVariable(node, p);
}
}.scan(cut, null);
}
/**
 * Parses {@code code}, collects the printed form of every erroneous
 * subtree the parser attaches to the AST, and compares the collected
 * values against {@code expectedValues}.
 *
 * <p>Fixes over the previous copies of this logic: each test now reports
 * its own name in the failure message (three tests wrongly reported
 * "testSwitchError"), the stray double assignment {@code cut = cut = ...}
 * is gone, and the expected value is passed before the actual one, as in
 * the rest of this class.
 *
 * @param testName name of the calling test, used in the failure message
 * @param code broken source text to parse
 * @param normalizeLineEnds if true, platform line separators in the
 *        collected values are converted to "\n" before comparing
 * @param expectedValues expected printed forms, in visitation order
 */
private void assertErroneousTreeValues(String testName, String code,
        final boolean normalizeLineEnds, String... expectedValues)
        throws IOException {
    CompilationUnitTree cut = getCompilationUnitTree(code);
    final List<String> values = new ArrayList<>();
    new TreeScanner<Void, Void>() {
        @Override
        public Void visitErroneous(ErroneousTree node, Void p) {
            String value = getErroneousTreeValues(node).toString();
            values.add(normalizeLineEnds ? normalize(value) : value);
            return null;
        }
    }.scan(cut, null);
    List<String> expected = new ArrayList<>(Arrays.asList(expectedValues));
    assertEquals(testName + ": The Erroneous tree "
            + "error values: " + values
            + " do not match expected error values: "
            + expected, expected, values);
}

// expected erroneous tree: int x = y;(ERROR);
@Test
void testOperatorMissingError() throws IOException {
    assertErroneousTreeValues("testOperatorMissingError",
            "package test; public class ErrorTest { "
            + "void method() { int x = y z } }",
            false, "[z]");
}

// expected erroneous tree: String s = (ERROR);
@Test
void testMissingParenthesisError() throws IOException {
    assertErroneousTreeValues("testMissingParenthesisError",
            "package test; public class ErrorTest { "
            + "void f() {String s = new String; } }",
            false, "[new String()]");
}

// expected erroneous tree: package test; (ERROR)(ERROR)
@Test
void testMissingClassError() throws IOException {
    assertErroneousTreeValues("testMissingClassError",
            "package Test; clas ErrorTest { "
            + "void f() {String s = new String(); } }",
            false, "[, clas]", "[]");
}

// expected erroneous tree: void m1(int i) {(ERROR);{(ERROR);}
@Test
void testSwitchError() throws IOException {
    assertErroneousTreeValues("testSwitchError",
            "package test; public class ErrorTest { "
            + "int numDays; void m1(int i) { switchh {i} { case 1: "
            + "numDays = 31; break; } } }",
            false, "[switchh]", "[i]");
}

// expected erroneous tree: class ErrorTest {(ERROR)
@Test
void testMethodError() throws IOException {
    // The pretty-printed method tree contains a platform-dependent line
    // separator, so normalize the collected values before comparing.
    assertErroneousTreeValues("testMethodError",
            "package Test; class ErrorTest { "
            + "static final void f) {String s = new String(); } }",
            true, "[\nstatic final void f();]");
}
/*
* The following tests do not work just yet with nb-javac nor javac,
* they need further investigation, see CR: 7167356
*/
    /*
     * Disabled (no @Test annotation) pending CR 7167356.  For statements
     * that are complete without a trailing block ("return x", do-while,
     * "throw", "assert", a bare expression), a following '{' must not be
     * absorbed into the statement: the statement's end position and the
     * recovered block's start position must both sit right after the
     * statement text.
     */
    void testPositionBrokenSource126732a() throws IOException {
        String[] commands = new String[]{
            "return Runnable()",
            "do { } while (true)",
            "throw UnsupportedOperationException()",
            "assert true",
            "1 + 1",};
        for (String command : commands) {
            // NOTE(review): the generated source is deliberately broken
            // (unbalanced braces) -- recovery positions are what is tested.
            String code = "package test;\n"
                + "public class Test {\n"
                + "    public static void test() {\n"
                + "        " + command + " {\n"
                + "        new Runnable() {\n"
                + "        };\n"
                + "    }\n"
                + "}";
            JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null, null,
                    null, null, Arrays.asList(new MyFileObject(code)));
            CompilationUnitTree cut = ct.parse().iterator().next();
            ClassTree clazz = (ClassTree) cut.getTypeDecls().get(0);
            MethodTree method = (MethodTree) clazz.getMembers().get(0);
            List<? extends StatementTree> statements =
                    method.getBody().getStatements();
            StatementTree ret = statements.get(0);
            StatementTree block = statements.get(1);
            Trees t = Trees.instance(ct);
            // Position immediately after "<command> " -- where the broken
            // statement must end and the recovered block must start.
            int len = code.indexOf(command + " {") + (command + " ").length();
            assertEquals(command, len,
                    t.getSourcePositions().getEndPosition(cut, ret));
            assertEquals(command, len,
                    t.getSourcePositions().getStartPosition(cut, block));
        }
    }
    /*
     * Disabled (no @Test annotation) pending CR 7167356.  Companion to the
     * test above, but for jump statements (break/continue, labelled or
     * not) inside a while loop: the jump statement must end, and the
     * recovered block must start, immediately after the statement text.
     */
    void testPositionBrokenSource126732b() throws IOException {
        String[] commands = new String[]{
            "break",
            "break A",
            "continue ",
            "continue A",};
        for (String command : commands) {
            // NOTE(review): the generated source is deliberately broken
            // (unbalanced braces) -- recovery positions are what is tested.
            String code = "package test;\n"
                + "public class Test {\n"
                + "    public static void test() {\n"
                + "        while (true) {\n"
                + "            " + command + " {\n"
                + "            new Runnable() {\n"
                + "            };\n"
                + "        }\n"
                + "    }\n"
                + "}";
            JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null, null,
                    null, null, Arrays.asList(new MyFileObject(code)));
            CompilationUnitTree cut = ct.parse().iterator().next();
            ClassTree clazz = (ClassTree) cut.getTypeDecls().get(0);
            MethodTree method = (MethodTree) clazz.getMembers().get(0);
            // The jump statement lives inside the while loop's body block.
            List<? extends StatementTree> statements =
                ((BlockTree) ((WhileLoopTree) method.getBody().getStatements().get(0)).getStatement()).getStatements();
            StatementTree ret = statements.get(0);
            StatementTree block = statements.get(1);
            Trees t = Trees.instance(ct);
            int len = code.indexOf(command + " {") + (command + " ").length();
            assertEquals(command, len,
                    t.getSourcePositions().getEndPosition(cut, ret));
            assertEquals(command, len,
                    t.getSourcePositions().getStartPosition(cut, block));
        }
    }
    /*
     * Disabled (no @Test annotation) pending CR 7167356.  An enum
     * constant declared without arguments or a body has a synthesized
     * initializer with no real source span, so its start position is
     * expected to be NOPOS (-1).
     */
    void testStartPositionEnumConstantInit() throws IOException {
        String code = "package t; enum Test { AAA; }";
        JavacTaskImpl ct = (JavacTaskImpl) tool.getTask(null, null, null, null,
                null, Arrays.asList(new MyFileObject(code)));
        CompilationUnitTree cut = ct.parse().iterator().next();
        ClassTree clazz = (ClassTree) cut.getTypeDecls().get(0);
        // The enum constant AAA is modeled as a VariableTree member.
        VariableTree enumAAA = (VariableTree) clazz.getMembers().get(0);
        Trees t = Trees.instance(ct);
        int start = (int) t.getSourcePositions().getStartPosition(cut,
                enumAAA.getInitializer());
        assertEquals("testStartPositionEnumConstantInit", -1, start);
    }
void run(String[] args) throws Exception {
int passed = 0, failed = 0;
final Pattern p = (args != null && args.length > 0)
? Pattern.compile(args[0])
: null;
for (Method m : this.getClass().getDeclaredMethods()) {
boolean selected = (p == null)
? m.isAnnotationPresent(Test.class)
: p.matcher(m.getName()).matches();
if (selected) {
try {
m.invoke(this, (Object[]) null);
System.out.println(m.getName() + ": OK");
passed++;
} catch (Throwable ex) {
System.out.printf("Test %s failed: %s %n", m, ex.getCause());
failed++;
}
}
}
System.out.printf("Passed: %d, Failed %d%n", passed, failed);
if (failed > 0) {
throw new RuntimeException("Tests failed: " + failed);
}
if (passed == 0 && failed == 0) {
throw new AssertionError("No test(s) selected: passed = " +
passed + ", failed = " + failed + " ??????????");
}
}
}
/**
 * Minimal self-contained assertion helpers for the reflective test
 * driver, plus the {@link Test} marker annotation that tags runnable
 * test methods.  Failures are reported by throwing a
 * {@link RuntimeException} carrying the supplied message.
 */
abstract class TestCase {
    /** Fails with {@code message} unless {@code i == pos}. */
    void assertEquals(String message, int i, int pos) {
        if (i != pos) {
            fail(message);
        }
    }
    /** Fails with {@code message} if {@code bvalue} is {@code true}. */
    void assertFalse(String message, boolean bvalue) {
        if (bvalue) {
            fail(message);
        }
    }
    /** Fails unless {@code i == l}; the message includes both values. */
    void assertEquals(String message, int i, long l) {
        if (i != l) {
            fail(message + ":" + i + ":" + l);
        }
    }
    /**
     * Fails with {@code message} unless {@code o1} and {@code o2} are
     * equal (both {@code null}, or {@code o1.equals(o2)}).
     * <p>
     * Bug fix: the original version silently passed when {@code o1} was
     * non-null and {@code o2} was {@code null}, hiding real mismatches.
     */
    void assertEquals(String message, Object o1, Object o2) {
        if (o1 == null ? o2 != null : !o1.equals(o2)) {
            fail(message);
        }
    }
    /** Fails if {@code o} is {@code null}. */
    void assertNotNull(Object o) {
        if (o == null) {
            fail();
        }
    }
    /** Fails with a generic message. */
    void fail() {
        fail("test failed");
    }
    /** Always throws a {@link RuntimeException} with {@code message}. */
    void fail(String message) {
        throw new RuntimeException(message);
    }
    /**
     * Indicates that the annotated method is a test method.
     */
    @Retention(RetentionPolicy.RUNTIME)
    @Target(ElementType.METHOD)
    public @interface Test {}
}
|
apache/derby | 36,834 | java/org.apache.derby.engine/org/apache/derby/impl/store/access/btree/LeafControlRow.java | /*
Derby - Class org.apache.derby.impl.store.access.btree.LeafControlRow
Licensed to the Apache Software Foundation (ASF) under one or more
contributor license agreements. See the NOTICE file distributed with
this work for additional information regarding copyright ownership.
The ASF licenses this file to you under the Apache License, Version 2.0
(the "License"); you may not use this file except in compliance with
the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
*/
package org.apache.derby.impl.store.access.btree;
import org.apache.derby.shared.common.reference.SQLState;
import org.apache.derby.shared.common.sanity.SanityManager;
import org.apache.derby.iapi.services.io.StoredFormatIds;
import org.apache.derby.shared.common.error.StandardException;
import org.apache.derby.iapi.store.access.conglomerate.LogicalUndo;
import org.apache.derby.iapi.store.access.AccessFactoryGlobals;
import org.apache.derby.iapi.store.access.ConglomerateController;
import org.apache.derby.iapi.store.raw.ContainerHandle;
import org.apache.derby.iapi.store.raw.FetchDescriptor;
import org.apache.derby.iapi.store.raw.Page;
import org.apache.derby.iapi.store.raw.RecordHandle;
import org.apache.derby.iapi.types.DataValueDescriptor;
import org.apache.derby.iapi.services.io.FormatableBitSet;
/**
* @derby.formatId ACCESS_BTREE_LEAFCONTROLROW_V1_ID
*
* @derby.purpose Btree pages all have a control row at the front of every page. To
* determine the type of row, read the first column which is a format
* id and it tells what kind of control row it is.
*
* @derby.upgrade This format was made obsolete in the kimono release.
*
* @derby.diskLayout
* column 1 - control row type : StorableFormatId
* column 2 - left sibling page number : SQLLongint
* column 3 - right sibling page number: SQLLongint
* column 4 - parent page number : SQLLongint
* column 5 - level number (0 is leaf) : SQLLongint
* column 6 - isRoot : SQLLongint
* column 7 - Conglomerate object : null unless it is root else
* a Conglomerate object, matching
* that of current table.
* Currently this field
* is only used by logical undo and
* the type of object is inferred by
* the logical undo code.
**/
public class LeafControlRow extends ControlRow
{
/*
** Constructors of BranchControlRow
*/
/**
* No arg constructor.
* <p>
* Public no arg constructor is for the monitor to call for format
* id implemenation, it should not be called for any other reason.
**/
    public LeafControlRow()
    {
        // Intentionally empty: used only by the format-id machinery,
        // which populates state after construction.
    }
/**
* Constructs a leaf-page control row, for a newly allocated leaf page.
*
* @param btree The open btree to allocate this page from.
* @param page The newly allocated page where the control row will
* be inserted.
* @param parent The parent of the leaf page. Set to null for root.
* RESOLVE (mikem) - set to null otherwise?
* @param isRoot Is this page the root of the tree?
*
* @exception StandardException Standard exception policy.
*/
    LeafControlRow(
    OpenBTree btree,
    Page page,
    ControlRow parent,
    boolean isRoot)
        throws StandardException
    {
        // All leaf pages are at level 0.
        // Delegates entirely to ControlRow; the control row itself is
        // inserted onto the page later (see allocate()/initEmptyBtree()).
        super(btree, page, 0, parent, isRoot);
    }
/* Private/Protected methods of This class: */
/**
* Allocate a new leaf page to the conglomerate.
*
* @param btree The open conglomerate from which to get the leaf from
* @param parent The parent page of the newly allocated page, null if
* allocating root page.
*
* @exception StandardException Standard exception policy.
*/
    private static LeafControlRow allocate(
    OpenBTree btree,
    ControlRow parent)
        throws StandardException
    {
        // Grab a fresh page from the container; it comes back latched.
        Page page = btree.container.addPage();
        // Create a control row for the new page.
        LeafControlRow control_row =
            new LeafControlRow(btree, page, parent, false);
        // Insert the control row on the page, in the first slot on the page.
        // This operation is only done as part of a new tree or split,
        // which both will be undone physically so no logical undo record is
        // needed.
        byte insertFlag = Page.INSERT_INITIAL;
        insertFlag |= Page.INSERT_DEFAULT;
        RecordHandle rh =
            page.insertAtSlot(Page.FIRST_SLOT_NUMBER,
                control_row.getRow(),
                (FormatableBitSet) null,
                (LogicalUndo) null, insertFlag,
                AccessFactoryGlobals.BTREE_OVERFLOW_THRESHOLD);
        if (SanityManager.DEBUG)
        {
            // Sanity: re-fetch slot 0 and verify the insert really landed
            // where we expect (same record id and page number).
            RecordHandle rh2 = null;
            rh2 = page.fetchFromSlot(
                (RecordHandle) null, page.FIRST_SLOT_NUMBER,
                new DataValueDescriptor[0], (FetchDescriptor) null, true);
            SanityManager.ASSERT(rh.getId() == rh2.getId() &&
                rh.getPageNumber() == rh2.getPageNumber());
        }
        // Page is returned latched.
        return(control_row);
    }
/**
* Return the number of non-deleted rows from slot 1 through "startslot"
* <p>
* Return the number of non-deleted rows that exist on the page starting
* at slot one through "startslot".
* <p>
* RESOLVE (mikem) - is the expense of this routine worth it, it is only
* used for costing. Could an estimate from the nonDeletedRecordCount()
* be used instead?
*
* @return The requested non_deleted_row_count.
*
* @param startslot Count non deleted row up to and including this slot.
*
* @exception StandardException Standard exception policy.
**/
private float get_left_nondeleted_rowcnt(
int startslot)
throws StandardException
{
int non_deleted_row_count = 0;
for (int slot = 1; slot <= startslot; slot++)
{
if (!this.page.isDeletedAtSlot(slot))
{
non_deleted_row_count++;
}
}
return(non_deleted_row_count);
}
/* Public Methods of LeafControlRow class: */
/**
* Perform page specific initialization.
* <p>
**/
    protected final void controlRowInit()
    {
        // Leaf pages need no page-type-specific initialization beyond
        // what the generic ControlRow setup already performs.
    }
/**
* Initialize conglomerate with one page, to be a 1 page btree.
*
* Given a conglomerate which already has one page allocated to it,
* initialize the page to be a leaf-root page with no entries. Allocate
* the control row and store it on the page.
*
* @param open_btree The open btree to initialize (container is open).
*
* @exception StandardException Standard exception policy.
*/
    public static void initEmptyBtree(
    OpenBTree open_btree)
        throws StandardException
    {
        // The root of a btree always lives on the container's first page.
        Page page =
            open_btree.container.getPage(ContainerHandle.FIRST_PAGE_NUMBER);
        // create a leaf control row for root page of a single page index //
        LeafControlRow control_row =
            new LeafControlRow(open_btree, page, null, true);
        byte insertFlag = Page.INSERT_INITIAL;
        insertFlag |= Page.INSERT_DEFAULT;
        RecordHandle rh =
            page.insertAtSlot(
                Page.FIRST_SLOT_NUMBER,
                control_row.getRow(),
                (FormatableBitSet) null,
                (LogicalUndo) null, insertFlag,
                AccessFactoryGlobals.BTREE_OVERFLOW_THRESHOLD);
        if (SanityManager.DEBUG)
        {
            // Sanity: verify the control row landed in slot 0 as expected.
            RecordHandle rh2 = null;
            rh2 = page.fetchFromSlot(
                (RecordHandle) null,
                Page.FIRST_SLOT_NUMBER,
                new DataValueDescriptor[0], (FetchDescriptor) null, true);
            SanityManager.ASSERT(rh.getId() == rh2.getId() &&
                rh.getPageNumber() == rh2.getPageNumber());
        }
        if (SanityManager.DEBUG)
        {
            if (SanityManager.DEBUG_ON("enableBtreeConsistencyCheck"))
            {
                control_row.checkConsistency(
                    open_btree, (ControlRow) null, true);
            }
        }
        // Done with the root page; drop the latch before returning.
        page.unlatch();
        return;
    }
/*
** Non - Debug/consistency check Methods of ControlRow:
*/
/**
* Get the number of columns in the control row.
* <p>
* Control rows all share the first columns as defined by this class and
* then add columns to the end of the control row. For instance a branch
* control row add a child page pointer field.
* <p>
*
* @return The total number of columns in the control row.
**/
protected final int getNumberOfControlRowColumns()
{
return(this.CR_NCOLUMNS);
}
/**
* Is the current page the leftmost leaf of tree?
* <p>
*
* @return true if the current page is the leftmost leaf of the tree,
* else return false.
*
* @exception StandardException Standard exception policy.
**/
public boolean isLeftmostLeaf()
throws StandardException
{
return(getleftSiblingPageNumber() ==
ContainerHandle.INVALID_PAGE_NUMBER);
}
/**
* Is the current page the rightmost leaf of tree?
* <p>
*
* @return true if the current page is the rightmost leaf of the tree,
* else return false.
*
* @exception StandardException Standard exception policy.
**/
public boolean isRightmostLeaf()
throws StandardException
{
return(getrightSiblingPageNumber() ==
ContainerHandle.INVALID_PAGE_NUMBER);
}
/**
** Perform a search of this leaf page, ultimately returning the latched
** leaf page and row slot after which the given key belongs.
** The slot is returned in the result structure. If the key
** exists on the page, the result.exact will be true. Otherwise,
** result.exact will be false, and the row slot returned will be
** the one immediately preceding the position at which the key
** belongs.
*
* @exception StandardException Standard exception policy.
**/
    public ControlRow search(
    SearchParameters sp)
        throws StandardException
    {
        // Position sp.resultSlot/resultExact on this leaf page.
        searchForEntry(sp);
        if (sp.searchForOptimizer)
        {
            // Update left_fraction to be used to estimate the number of
            // rows left of the current search location.
            // after the code below startslot will be the slot that is one
            // before the first slot to be returned by the scan positioning
            // for this key, including GT/GE positioning. This is exactly
            // what the LeafControlRow.positionAtStartForForwardScan() does,
            // to position for the start of a scan.
            int startslot = sp.resultSlot;
            if (sp.resultExact)
            {
                // we found exactly the row we are looking for.
                if (SanityManager.DEBUG)
                    SanityManager.ASSERT(sp.resultSlot > 0);
                // RESOLVE (mikem) - add in a search operator argument so that
                // below can be if (op == ScanController.GE)
                if (sp.partial_key_match_op ==
                    SearchParameters.POSITION_LEFT_OF_PARTIAL_KEY_MATCH)
                {
                    // This means the scan was positioned for GE rather than GT
                    startslot--;
                }
            }
            // non_deleted_left_row is the number of actual rows left of the
            // first row to be returned by a scan positioned as requested.
            // The 0th slot is a control row which is not counted.
            float non_deleted_left_rows = get_left_nondeleted_rowcnt(startslot);
            int non_deleted_row_count = this.page.nonDeletedRecordCount();
            // System.out.println(
            // "\n\t non_deleted_row_count = " + non_deleted_row_count +
            // "\n\t non_deleted_left_rows = " + non_deleted_left_rows +
            // "\n\t startslot = " + startslot);
            if (this.getIsRoot())
            {
                // Start of the descent: the whole key space is "current"
                // and nothing is yet known to be to the left.
                sp.current_fraction = 1;
                sp.left_fraction = 0;
            }
            // calculate the fraction of rows in the table which are left of
            // the current slot in the search. After the search is completed
            // (sp.left_fraction * number of rows), is the estimated number
            // of rows to the left of the current row.
            if (non_deleted_row_count > 1)
                sp.left_fraction +=
                    (sp.current_fraction) *
                    (non_deleted_left_rows / (non_deleted_row_count - 1));
            // no-one really uses current fraction after leaf is through with
            // it. Set it to help diagnose algorithm.
            if (non_deleted_row_count > 1)
                sp.current_fraction =
                    (sp.current_fraction) *
                    (((float) 1) / (non_deleted_row_count - 1));
        }
        // A leaf terminates the recursive search; return self, still latched.
        return(this);
    }
/**
* Search and return the left most leaf page.
* <p>
* Perform a recursive search, ultimately returning the
* leftmost leaf page which is the first leaf page in the
* leaf sibling chain. (This method might better be called
* getFirstLeafPage()).
*
* @return The leftmost leaf page.
*
* @param btree The open btree to associate latches/locks with.
*
* @exception StandardException Standard exception policy.
**/
protected ControlRow searchLeft(OpenBTree btree)
throws StandardException
{
return(this);
}
/**
* Search and return the right most leaf page.
* <p>
* Perform a recursive search, ultimately returning the
* rightmost leaf page which is the last leaf page in the
* leaf sibling chain. (This method might better be called
* getLastLeafPage()).
*
* @return The rightmost leaf page.
*
* @param btree The open btree to associate latches/locks with.
*
* @exception StandardException Standard exception policy.
**/
protected ControlRow searchRight(OpenBTree btree)
throws StandardException
{
return(this);
}
/**
** Perform a recursive shrink operation for the key.
** If this method returns true, the caller should
** remove the corresponding entry for the page.
** This routine is not guaranteed to successfully
** shrink anything. The page lead to by the key might
** turn out not to be empty by the time shrink gets
** there, and shrinks will give up if there is a deadlock.
** <P>
** The receiver page must be latched on entry and is
** returned unlatched.
*
* @exception StandardException Standard exception policy.
**/
    protected boolean shrinkFor(
    OpenBTree btree,
    DataValueDescriptor[] key)
        throws StandardException
    {
        boolean shrink_me = false;
        try
        {
            // If this page is empty (ie. only has a control row), and it's not
            // the root page, unlink it. An empty btree consists of
            // simply an empty leaf-root page.
            // RESOLVE (mikem) - may want this routine to try to purge
            // committed delete rows here?
            if ((this.page.recordCount() == 1) && !getIsRoot())
            {
                // See if we can unlink this page (might not be able to because
                // unlinking can cause deadlocks). A successful unlink
                // unlatches the page.
                shrink_me = unlink(btree);
            }
        }
        finally
        {
            // unlink() unlatched the page on success; otherwise the page is
            // still latched and must be released here.
            if (!shrink_me)
                this.release();
        }
        return(shrink_me);
    }
/**
* Perform a top down split pass making room for the the key in "row".
* <p>
* Perform a split such that a subsequent call to insert
* given the argument index row will likely find room for it. Since
* latches are released the client must code for the case where another
* user has grabbed the space made available by the split pass and be
* ready to do another split.
* <p>
* On entry, the parent is either null or latched, and the
* current page is latched. On exit, all pages will have been
* unlatched. If the parent is null, then this page is a root
* leaf page.
*
* @return page number of the newly allocated leaf page created by split.
*
* @param open_btree The open btree to associate latches with.
* @param template A scratch area to use while searching for split pass.
* @param parent_page The parent page of the current page in the split pass.
* starts at null for root.
* @param splitrow The key to make room for during the split pass.
* @param flag A flag used to direct where point of split should be
* chosen.
*
* @exception StandardException Standard exception policy.
**/
    protected long splitFor(
    OpenBTree open_btree,
    DataValueDescriptor[] template,
    BranchControlRow parent_page,
    DataValueDescriptor[] splitrow,
    int flag)
        throws StandardException
    {
        long current_leaf_pageno = this.page.getPageNumber();
        if (SanityManager.DEBUG)
        {
            if (parent_page == null && ( ! this.getIsRoot()))
                SanityManager.THROWASSERT(
                    this + " splitFor null parent and non-root");
        }
        // See if this page has space.
        // Case 1: no split needed -- commit, release latches, and return.
        if ((this.page.recordCount() - 1 < BTree.maxRowsPerPage) &&
            (this.page.spaceForInsert(splitrow, (FormatableBitSet) null,
                AccessFactoryGlobals.BTREE_OVERFLOW_THRESHOLD)))
        {
            // The splitFor() operation is complete, commit the work done
            // before releasing the latches.
            open_btree.getXactMgr().commit();
            if (parent_page != null)
                parent_page.release();
            this.release();
            return(current_leaf_pageno);
        }
        // RESOLVE (mikem) - for rows bigger than pages this assert may
        // trigger until we have long rows.
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(this.page.recordCount() > 1);
        // Track.LeafSplit++;
        // Case 2: this leaf is the root -- grow a new branch root first,
        // then restart the split from the new root.
        if (this.getIsRoot())
        {
            // Track.LeafSplitRoot++;
            growRoot(open_btree, template, this);
            // At this point, this page has been unlatched. So code below this
            // point must not access this object's fields.
            ControlRow new_root = ControlRow.get(open_btree, BTree.ROOTPAGEID);
            return(
                new_root.splitFor(open_btree, template, null, splitrow, flag));
        }
        // At this point we know that this page has to be split and
        // that it isn't a root page.
        int splitpoint = (this.page.recordCount() - 1) / 2 + 1;
        if ((flag & ControlRow.SPLIT_FLAG_FIRST_ON_PAGE) != 0)
        {
            // move all the row to the new page
            splitpoint = 1;
        }
        else if ((flag & ControlRow.SPLIT_FLAG_LAST_ON_PAGE) != 0)
        {
            // This is not optimal as we would rather move no rows to the
            // next page, but what should we use as a discriminator?
            splitpoint = this.page.recordCount() - 1;
        }
        if (SanityManager.DEBUG)
        {
            if (splitpoint <= 0)
                SanityManager.THROWASSERT(this + " yikes! splitpoint of 0!");
        }
        // Save away current split point leaf row, and build a branch row
        // based on it.
        DataValueDescriptor[] split_leaf_row =
            open_btree.getConglomerate().createTemplate(
                open_btree.getRawTran());
        this.page.fetchFromSlot(
            (RecordHandle) null, splitpoint, split_leaf_row,
            (FetchDescriptor) null, true);
        // Create the branch row to insert onto the parent page. For now
        // use a fake page number because we don't know the real page
        // number until the allocate is done, but want to delay the
        // allocate until we know the insert will succeed.
        BranchRow branchrow = BranchRow.createBranchRowFromOldLeafRow(
            split_leaf_row, BranchRow.DUMMY_PAGE_NUMBER);
        // At this point we have guaranteed there is space in the parent
        // page for splitrow, but it could be the case that the new
        // "branchrow" does not fit on the parent page.
        if (!parent_page.page.spaceForInsert(
                branchrow.getRow(), (FormatableBitSet) null,
                AccessFactoryGlobals.BTREE_OVERFLOW_THRESHOLD))
        {
            // There is no room on the parent page to complete a split at
            // the current level, so restart the split at top with the
            // branchrow that did not fit. On return from this routine
            // there is no way to know the state of the tree, so the
            // current split pass recursion must end.
            return(
                BranchControlRow.restartSplitFor(
                    open_btree, template, parent_page, this,
                    branchrow.getRow(), splitrow, flag));
        }
        // Create a new leaf page under the parent.
        LeafControlRow newleaf =
            LeafControlRow.allocate(open_btree, parent_page);
        // Now that we know the page number of the new child page update
        // the branch row to be inserted with the correct value.
        branchrow.setPageNumber(newleaf.page.getPageNumber());
        // Test fail after allocation
        if (SanityManager.DEBUG)
        {
            if (SanityManager.DEBUG_ON("leaf_split_abort1"))
            {
                throw StandardException.newException(
                    SQLState.BTREE_ABORT_THROUGH_TRACE);
            }
        }
        // Link it to the right of the current page.
        newleaf.linkRight(open_btree, this);
        // Copy the index rows (from the splitpoint to the end of the page)
        // from the old page to the new leaf, do not
        // copy the control row. This routine will purge all the copied rows
        // and maintain the deleted status of the moved rows.
        int num_rows_to_move = this.page.recordCount() - splitpoint;
        if (SanityManager.DEBUG)
            SanityManager.ASSERT(num_rows_to_move >= 0);
        if (num_rows_to_move != 0)
        {
            this.page.copyAndPurge(
                newleaf.page, splitpoint, num_rows_to_move, 1);
        }
        // Test fail after new page has been updated.
        if (SanityManager.DEBUG)
        {
            if (SanityManager.DEBUG_ON("leaf_split_abort2"))
            {
                throw StandardException.newException(
                    SQLState.BTREE_ABORT_THROUGH_TRACE);
            }
        }
        // Test fail after new page has been updated.
        if (SanityManager.DEBUG)
        {
            if (SanityManager.DEBUG_ON("leaf_split_abort3"))
            {
                throw StandardException.newException(
                    SQLState.BTREE_ABORT_THROUGH_TRACE);
            }
        }
        // Find spot to insert branch row, and insert it.
        BranchRow branch_template =
            BranchRow.createEmptyTemplate(
                open_btree.getRawTran(),
                open_btree.getConglomerate());
        SearchParameters sp =
            new SearchParameters(
                branchrow.getRow(),
                SearchParameters.POSITION_LEFT_OF_PARTIAL_KEY_MATCH,
                branch_template.getRow(),
                open_btree, false);
        parent_page.searchForEntry(sp);
        // There must be space on the parent to insert the row!
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(
                parent_page.page.spaceForInsert(
                    branchrow.getRow(), (FormatableBitSet) null,
                    AccessFactoryGlobals.BTREE_OVERFLOW_THRESHOLD));
        }
        byte insertFlag = Page.INSERT_INITIAL;
        insertFlag |= Page.INSERT_DEFAULT;
        insertFlag |= Page.INSERT_UNDO_WITH_PURGE;
        if (parent_page.page.insertAtSlot(
                sp.resultSlot + 1,
                branchrow.getRow(),
                (FormatableBitSet) null,
                (LogicalUndo)null,
                insertFlag,
                AccessFactoryGlobals.BTREE_OVERFLOW_THRESHOLD) == null) {
            throw StandardException.newException(
                SQLState.BTREE_NO_SPACE_FOR_KEY);
        }
        // branchrow is only valid while split_leaf_row remains unchanged.
        branchrow = null;
        // RESOLVE (mikem) - this case breaks the btree currently - as the
        // abort of the insert leaves a logical delete in the tree.
        //
        // Test fail after parent page has been updated.
        if (SanityManager.DEBUG)
        {
            if (SanityManager.DEBUG_ON("leaf_split_abort4"))
            {
                throw StandardException.newException(
                    SQLState.BTREE_ABORT_THROUGH_TRACE);
            }
        }
        if (SanityManager.DEBUG)
        {
            if (SanityManager.DEBUG_ON("enableBtreeConsistencyCheck"))
            {
                this.checkConsistency(open_btree, parent_page, false);
                newleaf.checkConsistency(open_btree, parent_page, false);
                parent_page.checkConsistency(open_btree, null, false);
            }
        }
        // Set a hint in the page that any scan positioned on it needs
        // to reposition because rows may have moved off the page.
        page.setRepositionNeeded();
        // At this point a unit of work in the split down the tree has
        // been performed in an internal transaction. This work must
        // be committed before any latches are released.
        open_btree.getXactMgr().commit();
        parent_page.release();
        this.release(); // XXX (nat) Not good form to unlatch self.
        long new_leaf_pageno = newleaf.page.getPageNumber();
        newleaf.release();
        // Because we are at the leaf level and have completed the split
        // there is no more work, no latches should be held, and control
        // is returned up the recursive stack, to the insert causing the
        // split. Because latches are released, the inserter must recheck
        // that there is now space available as some other thread of control
        // could get in before he latches the page again.
        return(new_leaf_pageno);
    }
/**
** Grow a new root page from a leaf page. Slightly
** tricky because we want to retain page 0 as the root.
** <P>
** On entry, the current leaf root page is expected
** to be latched. On exit, all latches will have been
** released.
** <P>
** The caller cannot not assume success. If we have to release latches
** this routine just returns and assumes the caller will retry the
** grow root if necessary.
**/
    private static void growRoot(
    OpenBTree open_btree,
    DataValueDescriptor[] template,
    LeafControlRow leafroot)
        throws StandardException
    {
        BranchControlRow branchroot = null;
        LeafControlRow newleaf = null;
        // Allocate a new leaf page under the existing leaf root.
        newleaf = LeafControlRow.allocate(open_btree, leafroot);
        // Test fail after allocation
        if (SanityManager.DEBUG)
        {
            if (SanityManager.DEBUG_ON("leaf_split_growRoot1"))
            {
                throw StandardException.newException(
                    SQLState.BTREE_ABORT_THROUGH_TRACE);
            }
        }
        // Copy all the index rows from the root to the new leaf, do not
        // copy the control row. This routine will purge all the copied
        // rows and maintain the deleted status of the moved rows.
        if (SanityManager.DEBUG)
            SanityManager.ASSERT((leafroot.page.recordCount() - 1) > 0);
        leafroot.page.copyAndPurge(
            newleaf.page, 1, leafroot.page.recordCount() - 1, 1);
        // Test fail after row copy
        if (SanityManager.DEBUG)
        {
            if (SanityManager.DEBUG_ON("leaf_split_growRoot2"))
            {
                throw StandardException.newException(
                    SQLState.BTREE_ABORT_THROUGH_TRACE);
            }
        }
        // Test fail after purge
        if (SanityManager.DEBUG)
        {
            if (SanityManager.DEBUG_ON("leaf_split_growRoot3"))
            {
                // Make sure tree is very trashed and logical recovery will
                // not work.
                leafroot.setLevel(42);
                leafroot.setParent(42);
                throw StandardException.newException(
                    SQLState.BTREE_ABORT_THROUGH_TRACE);
            }
        }
        // Put a branch control row on the root page, making the new leaf
        // the left child. All leaf splits result in level-1 branch pages.
        // This will be a branch-root page.
        // Construction of the BranchControlRow will set it as the aux
        // object for the page, this in turn invalidates the previous aux
        // object which is leafroot. Thus leafroot must not be used once
        // the constructor returns.
        branchroot = new BranchControlRow(
            open_btree, leafroot.page, 1, null, true,
            newleaf.page.getPageNumber());
        leafroot = null;
        // Replace the old leaf root control row with the new branch root
        // control row.
        branchroot.page.updateAtSlot(
            0, branchroot.getRow(), (FormatableBitSet) null);
        // Test fail after purge
        if (SanityManager.DEBUG)
        {
            if (SanityManager.DEBUG_ON("leaf_split_growRoot4"))
            {
                throw StandardException.newException(
                    SQLState.BTREE_ABORT_THROUGH_TRACE);
            }
        }
        if (SanityManager.DEBUG)
        {
            if (SanityManager.DEBUG_ON("enableBtreeConsistencyCheck"))
            {
                newleaf.checkConsistency(open_btree, branchroot, false);
                branchroot.checkConsistency(open_btree, null, false);
            }
        }
        // Set a hint in the page that any scan positioned on it needs
        // to reposition because the page is no longer a leaf.
        branchroot.page.setRepositionNeeded();
        // At this point a unit of work in the split down the tree has
        // been performed in an internal transaction. This work must
        // be committed before any latches are released.
        open_btree.getXactMgr().commit();
        // Test fail after commit of split
        if (SanityManager.DEBUG)
        {
            if (SanityManager.DEBUG_ON("leaf_split_growRoot5"))
            {
                throw StandardException.newException(
                    SQLState.BTREE_ABORT_THROUGH_TRACE);
            }
        }
        // The variable 'branchroot' refers to a page that was latched by
        // leafroot. After a growRoot() from a leaf there will be no pages
        // latched. It is up to the callers to reget the root page latched
        // and continue their work.
        //
        if (branchroot != null)
            branchroot.release();
        // NOTE(review): leafroot was cleared above, so this release is
        // unreachable; kept as defensive cleanup code.
        if (leafroot != null)
            leafroot.release();
        if (newleaf != null)
            newleaf.release();
    }
/**
* Return the left child pointer for the page.
* <p>
* Leaf pages don't have children, so they override this and return null.
*
* @return The page which is the leftmost child of this page.
*
* @param btree The open btree to associate latches/locks with.
*
* @exception StandardException Standard exception policy.
**/
protected ControlRow getLeftChild(OpenBTree btree)
throws StandardException
{
return(null);
}
/**
* Return the right child pointer for the page.
* <p>
* Leaf pages don't have children, so they override this and return null.
*
* @return The page which is the rightmost child of this page.
*
* @param btree The open btree to associate latches/locks with.
*
* @exception StandardException Standard exception policy.
**/
protected ControlRow getRightChild(OpenBTree btree)
throws StandardException
{
return(null);
}
/*
** Debug/consistency check Methods of ControlRow:
*/
/**
** Perform consistency checks on a leaf page.
**
** Check consistency of the page and its children,
** returning the number of pages seen, and throwing
** errors if inconsistencies are found.
** The checks specific to a leaf page are:
** <menu>
** <li> Page is at level 0.
** <li> Version is a valid leaf page version.
** <li> Control row has right number of columns for leaf.
** </menu>
** This method also performs the consistency checks that
** are common to both leaf and branch pages.
** @see ControlRow#checkGeneric
**
** @exception StandardException Standard exception policy.
**/
    public int checkConsistency(
    OpenBTree btree,
    ControlRow parent,
    boolean check_other_pages
    )
        throws StandardException
    {
        // Do the consistency checks that are common to all
        // types of pages.
        checkGeneric(btree, parent, check_other_pages);
        // Leaf specific, control row checks
        if (SanityManager.DEBUG)
        {
            SanityManager.ASSERT(this.getLevel() == 0, "leaf not at level 0");
            // RESOLVE (mikem) - how to sanity check correct version?
            /*
            if (this.getVersion() != CURRENT_LEAF_VERSION)
                SanityManager.THROWASSERT(
                    "Expected leaf version:(" +
                    CURRENT_LEAF_VERSION + ") but got (" +
                    this.getVersion());
            */
            // The control row in slot 0 must have exactly the generic
            // ControlRow column count (leaves add no extra columns).
            SanityManager.ASSERT(
                this.page.fetchNumFieldsAtSlot(CR_SLOT) ==
                ControlRow.CR_NCOLUMNS);
            // The remaining checks are specific to leaf pages.
            // Check that every row has at least as many columns
            // as the number of key fields in the b-tree.
            int numslots = this.page.recordCount();
            for (int slot = 1; slot < numslots; slot++)
            {
                if (this.page.fetchNumFieldsAtSlot(slot) <
                    btree.getConglomerate().nKeyFields)
                    SanityManager.THROWASSERT(
                        "row[" + slot + "]"
                        + " has " + this.page.fetchNumFieldsAtSlot(slot)
                        + " columns, should have at least" +
                        btree.getConglomerate().nKeyFields);
                // RESOLVE - the generic btree code should know nothing about
                // the secondaryindex row location column, but put this here for
                // now because I can't figure how to get a call out to the
                // secondary index code at the page level consistency checking
                // level.
            }
        }
        // We checked one page (this one).
        return 1;
    }
/**
 * Recursively print the tree starting at the current node.
 * <p>
 * Leaf pages have no children, so this simply dumps the page itself
 * (debug builds only; a no-op otherwise).
 *
 * @param btree the open b-tree used to format the page dump
 *
 * @exception StandardException Standard exception policy.
 **/
public void printTree(
OpenBTree btree)
throws StandardException
{
    // Guard clause: printing is a debug-only facility.
    if (!SanityManager.DEBUG)
    {
        return;
    }
    SanityManager.DEBUG_PRINT("p_tree", this.debugPage(btree));
}
/*
 * Methods of TypedFormat:
 */

/**
 * Return the stored-format identifier for this control row type.
 *
 * @return the V1 leaf-control-row format id constant
 * @see org.apache.derby.iapi.services.io.TypedFormat#getTypeFormatId
 */
public int getTypeFormatId()
{
    // Leaf control rows are always persisted with the V1 access format.
    final int formatId = StoredFormatIds.ACCESS_BTREE_LEAFCONTROLROW_V1_ID;
    return formatId;
}
}
|
apache/rya | 37,043 | extras/indexing/src/main/java/org/apache/rya/indexing/accumulo/freetext/query/QueryParserTreeConstants.java | /* Generated By:JavaCC: Do not edit this line. QueryParserTreeConstants.java Version 5.0 */
package org.apache.rya.indexing.accumulo.freetext.query;
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
/**
 * Node-type constants generated by JJTree for the query parser's AST.
 *
 * <p>Each {@code JJT*} constant is the integer id of an AST node kind, and
 * {@code jjtNodeName[id]} is the printable name for that id — the array order
 * must stay in sync with the constant values above it.
 *
 * <p>NOTE(review): this is JavaCC/JJTree generated output (see the checksum
 * comment following the interface) — regenerate from the grammar rather than
 * hand-editing.
 */
public interface QueryParserTreeConstants
{
  // Node-type ids; interface fields are implicitly public static final.
  public int JJTSIMPLENODE = 0;
  public int JJTEXPRESSION = 1;
  // "void" id — presumably for productions that build no node; confirm against the .jjt grammar.
  public int JJTVOID = 2;
  public int JJTTERM = 3;
  // Display names indexed by the JJT* ids above.
  public String[] jjtNodeName = {
    "SimpleNode",
    "Expression",
    "void",
    "Term",
  };
}
/* JavaCC - OriginalChecksum=7db3f19ae343b33492ca4cbb4cb236be (do not edit this line) */
|
google/closure-compiler | 35,745 | test/com/google/javascript/jscomp/DeadAssignmentsEliminationTest.java | /*
* Copyright 2008 The Closure Compiler Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.javascript.jscomp;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
/** Tests for {@link DeadAssignmentsElimination}. */
@RunWith(JUnit4.class)
public final class DeadAssignmentsEliminationTest extends CompilerTestCase {
public DeadAssignmentsEliminationTest() {
  // Every test case compiles with "var extern;" as its externs source, so the
  // name `extern` resolves as an external symbol (exercised by testAssignToExtern).
  super("var extern;");
}
@Before
public void customSetUp() throws Exception {
  // Normalize test inputs before running the pass — presumably
  // DeadAssignmentsElimination expects a normalized AST; individual tests
  // additionally call enableNormalizeExpectedOutput() where needed.
  enableNormalize();
}
@Override
protected CompilerPass getProcessor(final Compiler compiler) {
  // Run DeadAssignmentsElimination via a traversal over the script root only;
  // the externs tree is intentionally not visited.
  return (externs, js) ->
      NodeTraversal.traverse(compiler, js, new DeadAssignmentsElimination(compiler));
}
@Test
public void testSimple() {
  // A dead store keeps its RHS (for possible side effects) but drops the write itself.
  inFunction("var a; a=1", "var a; 1");
  inFunction("var a; a=1+1", "var a; 1+1");
  inFunction("var a; a=foo();", "var a; foo()");
  inFunction("var a; a=foo?.();", "var a; foo?.()");
  // A store textually before the (hoisted) declaration is just as dead.
  inFunction("a=1; var a; a=foo();", "1; var a; foo();");
  inFunction("a=1; var a; a=foo?.();", "1; var a; foo?.();");
  // This should be: "var a; (function f(){})", but we don't mess with
  // functions with inner functions.
  inFunction("var a; a=function f(){}");
}
@Test
public void testPropAssignmentNotRemoved() {
  // We only remove dead assignments when lhs is a name node.
  // A property write (a.b = ...) is kept intact even though nothing reads it.
  inFunction("var a = {b:1}; a.b=1+1");
  inFunction("var a = {b:1}; a.b=foo();");
}
@Test
public void testArguments() {
  // Formal parameters behave like locals: a dead write to `a` is dropped
  // while the assigned expression survives for its side effects.
  test("function f(a){ a=1; }", "function f(a){ 1; }");
  test("function f(a){ a=1+1; }", "function f(a){ 1+1; }");
  test("function f(a){ a=foo(); }", "function f(a){ foo(); }");
  test("function f(a){ a=foo?.(); }", "function f(a){ foo?.(); }");
  test("function f(a){ a=1; a=foo(); }", "function f(a){ 1; foo(); }");
  test("function f(a){ a=1; a=foo?.(); }", "function f(a){ 1; foo?.(); }");
}
@Test
public void testLoops() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("for(var a=0; a<10; a++) {}");
inFunction("var x; for(var a=0; a<10; a++) {x=a}; a(x)");
inFunction("var x; for(var a=0; x=a<10; a++) {}", "var x; for(var a=0; a<10; a++) {}");
inFunction("var x; for(var a=0; a<10; x=a) {}", "var x; for(var a=0; a<10; a) {}");
inFunction("var x; for(var a=0; a<10; x=a,a++) {}", "var x; for(var a=0; a<10; a,a++) {}");
inFunction("var x; for(var a=0; a<10; a++,x=a) {}", "var x; for(var a=0; a<10; a++,a) {}");
inFunction("var x;for(var a=0; a<10; a++) {x=1}", "var x;for(var a=0; a<10; a++) {1}");
inFunction("var x; x=1; do{x=2}while(0); x", "var x; 1; do{x=2}while(0); x");
inFunction("var x; x=1; while(1){x=2}; x");
}
@Test
public void testMultiPaths() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var x,y; if(x)y=1;", "var x,y; if(x)1;");
inFunction("var x,y; if(x)y=1; y=2; x(y)", "var x,y; if(x)1; y=2; x(y)");
inFunction("var x; switch(x) { case(1): x=1; break; } x");
inFunction(
"var x; switch(x) { case(1): x=1; break; }", "var x; switch(x) { case(1): 1; break; }");
}
@Test
public void testUsedAsConditions() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var x; while(x=1){}", "var x; while(1){}");
inFunction("var x; if(x=1){}", "var x; if(1){}");
inFunction("var x; do{}while(x=1)", "var x; do{}while(1)");
inFunction("var x; if(x=1==4&&1){}", "var x; if(1==4&&1) {}");
inFunction("var x; if(0&&(x=1)){}", "var x; if(0&&1){}");
inFunction("var x; if((x=2)&&(x=1)){}", "var x; if(2&&1){}");
inFunction("var x; x=2; if(0&&(x=1)){}; x");
inFunction("var x,y; if( (x=1)+(y=2) > 3){}", "var x,y; if( 1+2 > 3){}");
}
@Test
public void nullishCoalesce() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var x; if(x=1==4??1){}", "var x; if(1==4??1) {}");
inFunction("var x; if(0??(x=1)){}", "var x; if(0??1){}");
inFunction("var x; if((x=2)??(x=1)){}", "var x; if(2??1){}");
inFunction("var x; x=2; if(0??(x=1)){}; x");
inFunction("var a, b; if ((a = 1) ?? (b = a)) {b}");
inFunction("var a, b; if ((b = a) ?? (a = 1)) {b}", "var a, b; if ((b = a) ?? (1)) {b}");
inFunction("var a; (a = 1) ?? (a = 2)", "var a; 1 ?? 2");
}
@Test
public void testUsedAsConditionsInSwitchStatements() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var x; switch(x=1){}", "var x; switch(1){}");
inFunction("var x; switch(x){case(x=1):break;}", "var x; switch(x){case(1):break;}");
inFunction("var x,y; switch(y) { case (x += 1): break; case (x): break;}");
inFunction(
"var x,y; switch(y) { case (x = 1): break; case (2): break;}",
"var x,y; switch(y) { case (1): break; case (2): break;}");
inFunction(
"var x,y; switch(y) { case (x+=1): break; case (x=2): break;}",
"var x,y; switch(y) { case (x+1): break; case (2): break;}");
}
@Test
public void testAssignmentInReturn() {
inFunction("var x; return x = 1;", "var x; return 1");
inFunction("var x; return");
}
@Test
public void testAssignmentSamples() {
// We want this to be "var x" in these cases.
inFunction("var x = 2;");
inFunction("var x = 2; x++;", "var x=2; void 0");
inFunction("var x; x=x++;", "var x;x++");
inFunction("var x; x+=1;", "var x;x+1");
}
@Test
public void testAssignmentInArgs() {
inFunction("var x; foo(x = 1);", "var x; foo(1);");
inFunction("var x; return foo(x = 1);", "var x; return foo(1);");
}
/** BUG #1358904 */
@Test
public void testAssignAndReadInCondition() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var a, b; if ((a = 1) && (b = a)) {b}");
inFunction("var a, b; if ((b = a) && (a = 1)) {b}", "var a, b; if ((b = a) && (1)) {b}");
}
@Test
public void testParameters() {
inFunction("param1=1; param1=2; param2(param1)", "1; param1=2; param2(param1)");
inFunction("param1=param2()", "param2()");
}
@Test
public void testErrorHandling() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var x; try{ x=1 } catch(e){ x=2 }; x");
inFunction("var x; try{ x=1 } catch(e){ x=2 }", "var x;try{ 1 } catch(e) { 2 }");
inFunction("var x; try{ x=1 } finally { x=2 }; x", "var x;try{ 1 } finally{ x=2 }; x");
inFunction("var x; while(1) { try{x=1;break}finally{x} }");
inFunction("var x; try{throw 1} catch(e){x=2} finally{x}");
inFunction(
"var x; try{x=1;throw 1;x} finally{x=2}; x", "var x; try{1;throw 1;x} finally{x=2}; x");
}
@Test
public void testErrorHandling2() {
inFunction(
"""
try {
} catch (e) {
e = 1;
let g = e;
print(g)
}
""");
inFunction(
"""
try {
} catch (e) {
e = 1;
{
let g = e;
print(g)
}
}
""");
}
@Test
public void testDeadVarDeclarations1() {
inFunction("var x=1; x=2; x", "var x; 1; x=2; x");
}
@Test
public void testDeadVarDeclarations2() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var x=1;");
inFunction("var x=1; x=2; x", "var x; 1; x=2; x");
inFunction("var x=1, y=10; x=2; x", "var x; 1; var y; 10; x=2; x");
inFunction("var x=1, y=x; y");
inFunction("var x=1, y=x; x=2; x", "var x = 1; var y; x; x=2; x;");
}
@Test
public void testDeadVarDeclarations_forLoop() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("for(var x=1;;);");
inFunction("for(var x=1,y=x;;);");
inFunction("for(var x=1;10;);");
}
@Test
public void testGlobal() {
// Doesn't do any work on global scope yet.
test("var x; x=1; x=2; x=3;", "var x; x=1; x=2; x=3;");
}
@Test
public void testInnerFunctions() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var x = function() { var x; x=1; }", "var x = function() { var x; 1; }");
}
@Test
public void testInnerFunctions2() {
// Give up DCE if there is a inner function.
inFunction("var x = 0; print(x); x = 1; var y = function(){}; y()");
}
@Test
public void testSelfReAssignment() {
inFunction("var x; x = x;", "var x; x");
}
@Test
public void testSelfIncrement() {
inFunction("var x; x = x + 1;", "var x; x + 1");
}
@Test
public void testAssignmentOp() {
// We have remove constant expressions that cleans this one up.
inFunction("var x; x += foo()", "var x; x + foo()");
}
@Test
public void testAssignmentOpUsedAsLhs() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var x,y; y = x += foo(); print(y)", "var x,y; y = x + foo(); print(y)");
inFunction("var x,y; y = x += foo?.(); print(y)", "var x,y; y = x + foo?.(); print(y)");
}
@Test
public void testAssignmentOpUsedAsCondition() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var x; if(x += foo()) {}", "var x; if(x + foo()) {}");
inFunction("var x; if(x += foo?.()) {}", "var x; if(x + foo?.()) {}");
inFunction("var x; if((x += foo()) > 1) {}", "var x; if((x + foo()) > 1) {}");
// Not in a while because this happens every loop.
inFunction("var x; while((x += foo()) > 1) {}");
inFunction("var x; for(;--x;){}");
inFunction("var x; for(;x--;){}");
inFunction("var x; for(;x -= 1;){}");
inFunction("var x; for(;x = 0;){}", "var x; for(;0;){}");
inFunction("var x; for(;;--x){}");
inFunction("var x; for(;;x--){}");
inFunction("var x; for(;;x -= 1){}");
inFunction("var x; for(;;x = 0){}", "var x; for(;;0){}");
inFunction("var x; for(--x;;){}", "var x; void 0; for(;;){}");
inFunction("var x; for(x--;;){}", "var x; void 0; for(;;){}");
inFunction("var x; for(x -= 1;;){}", "var x; for(x - 1;;){}");
inFunction("var x; for(x = 0;;){}", "var x; for(0;;){}");
}
@Test
public void testDeadIncrement() {
// TODO(user): Optimize this.
inFunction("var x; x ++", "var x; void 0");
inFunction("var x; x --", "var x; void 0");
}
@Test
public void testDeadButAlivePartiallyWithinTheExpression() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var x; x = 100, print(x), x = 101;", "var x; x = 100, print(x), 101;");
inFunction(
"var x; x = 100, print(x), print(x), x = 101;",
"var x; x = 100, print(x), print(x), 101;");
inFunction(
"var x; x = 100, print(x), x = 0, print(x), x = 101;",
"var x; x = 100, print(x), x = 0, print(x), 101;");
// Here, `a=C` is removed as it is dead. `X=a` is removed as it is dead.
inFunction(
"var a, C, X, S; if ((X = a) && (a = C)) {}; a = S;", //
"var a, C, X, S; if (a&&C) {}; S;");
// Here, `a=C` is preserved as it is NOT dead. `X=a` is removed as it is dead.
inFunction(
"var a, C, X, S; if ((a = C) && (X = a)) {}; a = S;",
"var a, C, X, S; if ((a = C) && a) {}; S;");
}
@Test
public void testMutipleDeadAssignmentsButAlivePartiallyWithinTheExpression() {
inFunction(
"""
var x; x = 1, x = 2, x = 3, x = 4, x = 5,
print(x), x = 0, print(x), x = 101;
""",
"var x; 1, 2, 3, 4, x = 5, print(x), x = 0, print(x), 101;");
}
@Test
public void testDeadPartiallyWithinTheExpression() {
// Sadly, this is not covered. We don't suspect this would happen too
// often.
inFunction("var x; x = 100, x = 101; print(x);");
}
@Test
public void testAssignmentChain() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var a,b,c,d,e; a = b = c = d = e = 1", "var a,b,c,d,e; 1");
inFunction(
"var a,b,c,d,e; a = b = c = d = e = 1; print(c)",
"var a,b,c,d,e; c = 1 ; print(c)");
inFunction(
"var a,b,c,d,e; a = b = c = d = e = 1; print(a + e)",
"var a,b,c,d,e; a = e = 1; print(a + e)");
inFunction(
"var a,b,c,d,e; a = b = c = d = e = 1; print(b + d)",
"var a,b,c,d,e; b = d = 1; print(b + d)");
inFunction(
"var a,b,c,d,e; a = b = c = d = e = 1; print(a + b + d + e)",
"var a,b,c,d,e; a = b = d = e = 1; print(a + b + d + e)");
inFunction("var a,b,c,d,e; a = b = c = d = e = 1; print(a+b+c+d+e)");
}
@Test
public void testAssignmentOpChain() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var a,b,c,d,e; a = b = c += d = e = 1", "var a,b,c,d,e; c + 1");
inFunction(
"var a,b,c,d,e; a = b = c += d = e = 1; print(e)",
"var a,b,c,d,e; c + (e = 1); print(e)");
inFunction(
"var a,b,c,d,e; a = b = c += d = e = 1; print(d)",
"var a,b,c,d,e; c + (d = 1) ; print(d)");
inFunction(
"var a,b,c,d,e; a = b = c += d = e = 1; print(a)",
"var a,b,c,d,e; a = c + 1; print(a)");
}
@Test
public void testIncDecInSubExpressions() {
inFunction("var a; a = 1, a++; a");
inFunction("var a; a = 1, ++a; a");
inFunction("var a; a = 1, a--; a");
inFunction("var a; a = 1, --a; a");
inFunction("var a; a = 1, a++, print(a)");
inFunction("var a; a = 1, ++a, print(a)");
inFunction("var a; a = 1, a--, print(a)");
inFunction("var a; a = 1, --a, print(a)");
inFunction("var a; a = 1, print(a++)");
inFunction("var a; a = 1, print(++a)");
inFunction("var a; a = 1, print(a++)");
inFunction("var a; a = 1, print(++a)");
inFunction("var a; a = 1, print(a--)");
inFunction("var a; a = 1, print(--a)");
}
@Test
public void testNestedReassignments() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var a; a = (a = 1)", "var a; 1");
inFunction("var a; a = (a *= 2)", "var a; a*2");
// Note a = (a++) is not same as a++. Only if 'a' is dead.
inFunction("var a; a = (a++)", "var a; a++"); // Preferred: "var a"
inFunction("var a; a = (++a)", "var a; ++a"); // Preferred: "var a"
inFunction("var a; a = (b = (a = 1))", "var a; b = 1");
inFunction("var a; a = (b = (a *= 2))", "var a; b = a * 2");
inFunction("var a; a = (b = (a++))", "var a; b=a++");
inFunction("var a; a = (b = (++a))", "var a; b=++a");
// Include b as local.
inFunction("var a,b; a = (b = (a = 1))", "var a,b; 1");
inFunction("var a,b; a = (b = (a *= 2))", "var a,b; a * 2");
inFunction("var a,b; a = (b = (a++))", "var a,b; a++"); // Preferred: "var a,b"
inFunction("var a,b; a = (b = (++a))", "var a,b; ++a"); // Preferred: "var a,b"
inFunction("var a; a += (a++)", "var a; a + a++");
inFunction("var a; a += (++a)", "var a; a+ (++a)");
// Include b as local.
inFunction("var a,b; a += (b = (a = 1))", "var a,b; a + 1");
inFunction("var a,b; a += (b = (a *= 2))", "var a,b; a + (a * 2)");
inFunction("var a,b; a += (b = (a++))", "var a,b; a + a++");
inFunction("var a,b; a += (b = (++a))", "var a,b; a+(++a)");
}
@Test
public void testIncrementalReassignmentInForLoops() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("for(;x+=1;x+=1) {}");
inFunction("for(;x;x+=1){}");
inFunction("for(;x+=1;){foo(x)}");
inFunction("for(;1;x+=1){foo(x)}");
}
@Test
public void testIdentityAssignments() {
inFunction("var x; x=x", "var x; x");
inFunction("var x; x.y=x.y");
}
@Test
public void testBug8730257() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction(
"""
try {
var sortIndices = {};
sortIndices = bar();
for (var i = 0; i < 100; i++) {
var sortIndex = sortIndices[i];
bar(sortIndex);
}
} finally {
bar();
}
""");
}
@Test
public void testAssignToExtern() {
inFunction("extern = true;");
}
@Test
public void testIssue297a() {
testSame(
"""
function f(p) {
var x;
return ((x=p.id) && (x=parseInt(x.substr(1))) && x>0);
}; f('');
""");
}
@Test
public void testIssue297b() {
test(
"""
function f() {
var x;
return (x='') && (x = x.substr(1));
};
""",
"""
function f() {
var x;
return (x='') && (x.substr(1));
};
""");
}
@Test
public void testIssue297c() {
test(
"""
function f() {
var x;
return (x=1) && (x = f(x));
};
""",
"""
function f() {
var x;
return (x=1) && f(x);
};
""");
}
@Test
public void testIssue297d() {
test(
"""
function f(a) {
return (a=1) && (a = f(a));
};
""",
"""
function f(a) {
return (a=1) && (f(a));
};
""");
}
@Test
public void testIssue297e() {
test(
"""
function f(a) {
return (a=1) - (a = g(a));
};
""",
"""
function f(a) {
return (a=1) - (g(a));
};
""");
}
@Test
public void testIssue297f() {
test(
"""
function f(a) {
h((a=1) - (a = g(a)));
};
""",
"""
function f(a) {
h((a=1) - (g(a)));
};
""");
}
@Test
public void testIssue297g() {
test(
"""
function f(a) {
var b = h((b=1) - (b = g(b)));
return b;
};
""",
// The last assignment in the initializer should be eliminated
"""
function f(a) {
var b = h((b=1) - (b = g(b)));
return b;
};
""");
}
@Test
public void testIssue297h() {
test(
"""
function f(a) {
var b = b=1;
return b;
};
""",
// The assignment in the initializer should be eliminated
"""
function f(a) {
var b = b = 1;
return b;
};
""");
}
@Test
public void testInExpression0() {
inFunction("var a; return a=(a=(a=a));", "var a; return a;");
}
@Test
public void testInExpression1() {
inFunction("var a; return a=(a=(a=3));", "var a; return 3;");
inFunction("var a; return a=(a=(a=a));", "var a; return a;");
inFunction("var a; return a=(a=(a=a+1)+1);", "var a; return a+1+1;");
inFunction("var a; return a=(a=(a=f(a)+1)+1);", "var a; return f(a)+1+1;");
inFunction("var a; return a=f(a=f(a=f(a)));", "var a; return f(f(f(a)));");
}
@Test
public void testInExpression2() {
inFunction("var a; (a = 1) || (a = 2)", "var a; 1 || 2");
inFunction("var a; (a = 1) || (a = 2); return a");
inFunction("var a; a = 1; a ? a = 2 : a;", "var a; a = 1; a ? 2 : a;");
inFunction("var a; a = 1; a ? a = 2 : a; return a");
inFunction("var a; a = 1; a ? a : a = 2;", "var a; a = 1; a ? a : 2;");
inFunction("var a; a = 1; a ? a : a =2; return a");
inFunction("var a; (a = 1) ? a = 2 : a = 3;", "var a; 1 ? 2 : 3;");
// This can be improved. "a = 1" is dead but "a" is read in the following
// expression.
inFunction("var a; (a = 1) ? a = 2 : a = 3; return a");
}
@Test
public void testIssue384a() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction(
"""
var a, b;
if (f(b = true) || f(b = false))
a = b;
else
a = null;
return a;
""");
}
@Test
public void testIssue384b() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction(
"""
var a, b;
(f(b = true) || f(b = false)) ? (a = b) : (a = null);
return a;
""");
}
@Test
public void testIssue384c() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction(
"""
var a, b;
(a ? f(b = true) : f(b = false)) && (a = b);
return a;
""");
}
@Test
public void testIssue384d() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction(
"""
var a, b;
(f(b = true) || f(b = false)) && (a = b);
return a;
""");
}
@Test
public void testForIn() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var x = {}; for (var y in x) { y() }");
inFunction(
"var x, y, z; x = {}; z = {}; for (y in x = z) { y() }",
"var x, y, z; ({}); z = {}; for (y in z) { y() }");
inFunction(
"var x, y, z; x = {}; z = {}; for (y[z=1] in z) { y() }",
"var x, y, z; ({}); z = {}; for (y[z=1] in z) { y() }");
// "x in z" doesn't overwrite x if z is empty.
// TODO(user): If you look outside of just liveness, x = {} is dead.
// That probably requires value numbering or SSA to detect that case.
inFunction("var x, y, z; x = {}; z = {}; for (x in z) { x() }");
}
@Test
public void testArrowFunction() {
test("() => {var x; x = 1}", "() => {var x; 1}");
test("(a) => {a = foo()}", "(a) => {foo()}");
}
@Test
public void testClassMethods() {
test(
"""
class C{
func() {
var x;
x = 1;
}
}
""",
"""
class C{
func() {
var x;
1;
}
}
""");
test(
"""
class C{
constructor(x, y) {
this.x = x;
this.y = y;
}
func() {
var z;
z = 1;
this.x = 3
}
}
""",
"""
class C{
constructor(x, y) {
this.x = x;
this.y = y;
}
func() {
var z;
1;
this.x = 3
}
}
""");
}
@Test
public void testClassStaticBlocks() {
// TODO(b/240443227): Improve ClassStaticBlock optimization, dead code is not removed in
// expression.
testSame(
"""
class C{
static{
var x;
x = 1;
}
}
""");
testSame(
"""
var x = 0;
print(x);
x = 1;
class C {
static {
print(x);
}
}
""");
}
@Test
public void testGenerators() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
test(
"""
function* f() {
var x, y;
x = 1; y = 2;
yield y;
}
""",
"""
function* f() {
var x, y;
1; y = 2;
yield y;
}
""");
}
@Test
public void testForOf() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var x = {}; for (var y of x) { y() }");
inFunction(
"var x, y, z; x = {}; z = {}; for (y of x = z) {}",
"var x, y, z; ({}); z = {}; for (y of z) {}");
}
@Test
public void testForAwaitOf() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inAsyncFunction("var x = {}; for await (var y of x) { y() }");
inAsyncFunction(
"var x, y, z; x = {}; z = {}; for await (y of x = z) {}",
"var x, y, z; ({}); z = {}; for await (y of z) {}");
}
@Test
public void testTemplateStrings() {
inFunction("var name; name = 'Foo'; `Hello ${name}`");
inFunction(
"var name; name = 'Foo'; name = 'Bar'; `Hello ${name}`",
"var name; 'Foo'; name = 'Bar'; `Hello ${name}`");
}
@Test
public void testDestructuring() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("var a, b, c; [a, b, c] = [1, 2, 3];");
inFunction("var a, b, c; [a, b, c] = [1, 2, 3]; return a + c;");
inFunction(
"var a, b; a = 1; b = 2; [a, b] = [3, 4]; return a + b;",
"var a, b; 1; 2; [a, b] = [3, 4]; return a + b;");
inFunction("var x; x = {}; [x.a] = [3];");
}
@Test
public void testDestructuringDeclarationRvalue() {
// Test array destructuring
inFunction(
"""
let arr = []
if (CONDITION) {
arr = [3];
}
let [foo] = arr;
use(foo);
""");
// Test object destructuring
inFunction(
"""
let obj = {}
if (CONDITION) {
obj = {foo: 3};
}
let {foo} = obj;
use(foo);
""");
}
@Test
public void testDestructuringAssignmentRValue() {
// Test array destructuring
inFunction(
"""
let arr = []
if (CONDITION) {
arr = [3];
}
let foo;
[foo] = arr;
use(foo);
""");
// Test object destructuring
inFunction(
"""
let obj = {}
if (CONDITION) {
obj = {foo: 3};
}
let foo;
({foo} = obj);
use(foo);
""");
}
@Test
public void testForOfWithDestructuring() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction(
"""
let x;
x = [];
var y = 5; // Don't eliminate because if arr is empty, y will remain 5.
for ([y = x] of arr) { y; }
y;
""");
inFunction(
"""
let x;
x = [];
for (let [y = x] of arr) { y; }
""");
inFunction("for (let [key, value] of arr) {}");
inFunction("for (let [key, value] of arr) { key; value; }");
inFunction(
"var a; a = 3; for (let [a] of arr) { a; }", "var a; 3; for (let [a] of arr) { a; }");
}
@Test
public void testReferenceInDestructuringPatternDefaultValue() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction(
"""
let bar = [];
const {foo = bar} = obj;
foo;
""");
inFunction(
"""
let bar;
bar = [];
const {foo = bar} = obj;
foo;
""");
inFunction("let bar; bar = 3; const [foo = bar] = arr; foo;");
inFunction("let foo, bar; bar = 3; [foo = bar] = arr; foo;");
}
@Test
public void testReferenceInDestructuringPatternComputedProperty() {
// TODO(bradfordcsmith): Stop normalizing the expected output or document why it is necessary.
enableNormalizeExpectedOutput();
inFunction("let str; str = 'bar'; const {[str + 'baz']: foo} = obj; foo;");
inFunction(
"""
let obj = {};
let str, foo;
str = 'bar';
({[str + 'baz']: foo} = obj);
foo;
""");
}
@Test
public void testDefaultParameter() {
test(
"""
function f(x, y = 12) {
var z;
z = y;
}
""",
"""
function f(x, y = 12) {
var z;
y;
}
""");
}
@Test
public void testObjectLiterals() {
test(
"""
var obj = {
f() {
var x;
x = 2;
}
}
""",
"""
var obj = {
f() {
var x;
2;
}
}
""");
}
@Test
public void testObjectLiteralsComputedProperties() {
inFunction("let a; a = 2; let obj = {[a]: 3}; obj");
}
@Test
public void testSpread_consideredRead() {
inFunction(
"""
var a;
a = [];
[...a];
""");
inFunction(
"""
var a;
a = {};
({...a});
""");
}
@Test
public void testRest_notConsideredWrite() {
// TODO(b/126441776): The initial writes are dead. The pass should rewrite to the commented
// code.
inFunction(
"""
var a = 9;
[...a] = itr;
return a;
"""
// ,
// """
// var a;
// [...a] = itr;
// return a;
// """
);
inFunction(
"""
var a = 9;
({...a} = obj);
return a;
"""
// ,
// """
// var a;
// ({...a} = obj);
// return a;
// """
);
}
@Test
public void testDestructuring_notConsideredWrite() {
// TODO(b/126441776): The initial writes are dead. The pass should rewrite to the commented
// code.
inFunction(
"""
var a = 9;
[a] = itr;
return a;
"""
// ,
// """
// var a;
// [a] = itr;
// return a;
// """
);
inFunction(
"""
var a = 9;
({a} = obj);
return a;
"""
// ,
// """
// var a;
// ({a} = obj);
// return a;
// """
);
}
@Test
public void testRest_isNotRemovable() {
// TODO(b/126441776): Elimination is possible here under getter/setter assumptions. Determine if
// this is the correct behaviour.
inFunction(
"""
var a;
[...a] = itr;
""");
inFunction(
"""
var a;
({...a} = obj);
""");
}
@Test
public void testDestructuring_isNotRemovable() {
// TODO(b/126441776): Elimination is possible here under getter/setter assumptions. Determine if
// this is the correct behaviour.
inFunction(
"""
var a;
[a] = itr;
""");
inFunction(
"""
var a;
({a} = obj);
""");
}
@Test
public void testLet() {
inFunction("let a; a = 2;", "let a; 2;");
inFunction(
"let a; let b; a = foo(); b = 2; return b;", "let a; let b; foo(); b = 2; return b;");
inFunction(
"let a; let b; a = foo?.(); b = 2; return b;", //
"let a; let b; foo?.(); b = 2; return b;");
}
@Test
public void testConst1() {
inFunction("const a = 1;");
}
@Test
public void testConst2() {
test(
"async function f(d) { if (d) { d = 5; } const a = 1; const b = 2; const [x, y] = b; }",
"async function f(d) { if (d) { 5; } const a = 1; const b = 2; const [x, y] = b; }");
}
  @Test
  public void testBlockScoping() {
    // The inner block-scoped `x` shadows the outer one; its dead write is removed (the expected
    // output shows the shadowed name renamed to x$jscomp$1), while the outer `x`, which is
    // subsequently read, is kept.
    inFunction(
        """
        let x;
        {
          let x;
          x = 1;
        }
        x = 2;
        return x;
        """,
        """
        let x;
        {
          let x$jscomp$1;
          1;
        }
        x = 2;
        return x;
        """);
    // Same scenario with the outer write before the block. Note the input intentionally
    // relies on JS automatic semicolon insertion after `x = 2`; the output is normalized.
    inFunction(
        """
        let x;
        x = 2
        {
          let x;
          x = 1;
        }
        print(x);
        """,
        """
        let x;
        x = 2;
        {
          let x$jscomp$1;
          1;
        }
        print(x);
        """);
  }
  @Test
  public void testComputedClassField() {
    // Dead writes inside computed property names and field initializers are removed when `x`
    // is never read after the class body...
    inFunction(
        """
        let x;
        class C {
          static [x = 'field1'] = (x = 5);
          [x = 'field2'] = 7;
        }
        """,
        """
        let x;
        class C {
          static ['field1'] = 5;
          ['field2'] = 7;
        }
        """);
    // ...but are all kept when `x` is read afterwards.
    inFunction(
        """
        let x;
        class C {
          static [x = 'field1'] = (x = 5);
          [x = 'field2'] = 7;
        }
        use(x);
        """);
    // Here the write is removed even though the static initializer reads `x`; the embedded
    // JS comment in the expected output flags this as a known bug.
    inFunction(
        """
        let x;
        class C {
          static field1 = x;
          [x = 'field2'] = 7;
        }
        use(C.field1);
        """,
        """
        let x;
        class C {
          static field1 = x;
        // TODO(b/189993301): don't remove 'x = field2' because it's read by 'field1 = x'
          ['field2'] = 7;
        }
        use(C.field1);
        """);
  }
  @Test
  public void testComputedClassMethod() {
    // Writes inside computed method names are currently never removed, whether or not `x`
    // is read afterwards (all three cases expect the input unchanged).
    inFunction(
        """
        let x;
        class C {
        // NOTE: it would be correct to eliminate the following two assignments
          static [x = 'field1']() {}
          [x = 'field2']() {}
        }
        """);
    inFunction(
        """
        let x;
        class C {
          static [x = 'field1']() {};
          [x = 'field2']() {}
        }
        use(x);
        """);
    inFunction(
        """
        let x;
        class C {
          static field1 = x;
          [x = 'field2']() {}
        }
        use(C.field1);
        """);
  }
private void inFunction(String src) {
inFunction(src, src);
}
private void inFunction(String src, String expected) {
test(
"function FUNC(param1, param2){" + src + "}",
"function FUNC(param1, param2){" + expected + "}");
}
private void inAsyncFunction(String src) {
inAsyncFunction(src, src);
}
private void inAsyncFunction(String src, String expected) {
test(
"async function FUNC(param1, param2){" + src + "}",
"async function FUNC(param1, param2){" + expected + "}");
}
}
|
googleapis/google-cloud-java | 36,869 | java-visionai/proto-google-cloud-visionai-v1/src/main/java/com/google/cloud/visionai/v1/ListAssetsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/visionai/v1/warehouse.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.visionai.v1;
/**
*
*
* <pre>
* Request message for ListAssets.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.ListAssetsRequest}
*/
public final class ListAssetsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.visionai.v1.ListAssetsRequest)
ListAssetsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListAssetsRequest.newBuilder() to construct.
private ListAssetsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListAssetsRequest() {
parent_ = "";
pageToken_ = "";
filter_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListAssetsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_ListAssetsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_ListAssetsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.ListAssetsRequest.class,
com.google.cloud.visionai.v1.ListAssetsRequest.Builder.class);
}
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent, which owns this collection of assets.
* Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First access after parsing: the field still holds the raw ByteString. Decode it
      // once and cache the resulting String back into parent_ for subsequent calls.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
/**
*
*
* <pre>
* Required. The parent, which owns this collection of assets.
* Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* The maximum number of assets to return. The service may return fewer than
* this value.
* If unspecified, at most 50 assets will be returned.
* The maximum value is 1000; values above 1000 will be coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token, received from a previous `ListAssets` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListAssets` must match
* the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A page token, received from a previous `ListAssets` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListAssets` must match
* the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FILTER_FIELD_NUMBER = 5;
@SuppressWarnings("serial")
private volatile java.lang.Object filter_ = "";
/**
*
*
* <pre>
* The filter applied to the returned list.
* Only the following filterings are supported:
* "assets_with_contents = true", which returns assets with contents uploaded;
* "assets_with_contents = false", which returns assets without contents.
* </pre>
*
* <code>string filter = 5;</code>
*
* @return The filter.
*/
@java.lang.Override
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
}
}
/**
*
*
* <pre>
* The filter applied to the returned list.
* Only the following filterings are supported:
* "assets_with_contents = true", which returns assets with contents uploaded;
* "assets_with_contents = false", which returns assets without contents.
* </pre>
*
* <code>string filter = 5;</code>
*
* @return The bytes for filter.
*/
@java.lang.Override
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize only non-default fields, in ascending field-number order (1, 2, 3, 5).
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (pageSize_ != 0) {
      output.writeInt32(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 5, filter_);
    }
    // Round-trip any fields this binary's schema does not know about.
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize == -1 means "not yet computed"; the message is immutable, so the
    // size can be computed once and cached.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    // Mirror writeTo(): only non-default fields contribute to the size.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (pageSize_ != 0) {
      size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(5, filter_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    // Non-ListAssetsRequest messages are delegated to the superclass comparison.
    if (!(obj instanceof com.google.cloud.visionai.v1.ListAssetsRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.visionai.v1.ListAssetsRequest other =
        (com.google.cloud.visionai.v1.ListAssetsRequest) obj;
    // Field-by-field comparison, including unknown fields; kept consistent with hashCode().
    if (!getParent().equals(other.getParent())) return false;
    if (getPageSize() != other.getPageSize()) return false;
    if (!getPageToken().equals(other.getPageToken())) return false;
    if (!getFilter().equals(other.getFilter())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 doubles as the "not yet computed" sentinel for the memoized hash.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    // Fold in the descriptor plus each field (number, then value), matching equals().
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
    hash = (53 * hash) + getPageSize();
    hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getPageToken().hashCode();
    hash = (37 * hash) + FILTER_FIELD_NUMBER;
    hash = (53 * hash) + getFilter().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
public static com.google.cloud.visionai.v1.ListAssetsRequest parseFrom(java.nio.ByteBuffer data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ListAssetsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListAssetsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ListAssetsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListAssetsRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.visionai.v1.ListAssetsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListAssetsRequest parseFrom(java.io.InputStream input)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ListAssetsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListAssetsRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ListAssetsRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.visionai.v1.ListAssetsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.visionai.v1.ListAssetsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.visionai.v1.ListAssetsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for ListAssets.
* </pre>
*
* Protobuf type {@code google.cloud.visionai.v1.ListAssetsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.visionai.v1.ListAssetsRequest)
com.google.cloud.visionai.v1.ListAssetsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_ListAssetsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_ListAssetsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.visionai.v1.ListAssetsRequest.class,
com.google.cloud.visionai.v1.ListAssetsRequest.Builder.class);
}
// Construct using com.google.cloud.visionai.v1.ListAssetsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
pageSize_ = 0;
pageToken_ = "";
filter_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.visionai.v1.WarehouseProto
.internal_static_google_cloud_visionai_v1_ListAssetsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListAssetsRequest getDefaultInstanceForType() {
return com.google.cloud.visionai.v1.ListAssetsRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListAssetsRequest build() {
com.google.cloud.visionai.v1.ListAssetsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListAssetsRequest buildPartial() {
com.google.cloud.visionai.v1.ListAssetsRequest result =
new com.google.cloud.visionai.v1.ListAssetsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
    private void buildPartial0(com.google.cloud.visionai.v1.ListAssetsRequest result) {
      // Copy only the fields whose presence bits were set on this builder; untouched
      // fields keep the message's defaults.
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.pageSize_ = pageSize_;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.pageToken_ = pageToken_;
      }
      if (((from_bitField0_ & 0x00000008) != 0)) {
        result.filter_ = filter_;
      }
    }
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.visionai.v1.ListAssetsRequest) {
return mergeFrom((com.google.cloud.visionai.v1.ListAssetsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
    public Builder mergeFrom(com.google.cloud.visionai.v1.ListAssetsRequest other) {
      // Merging the default instance is a no-op.
      if (other == com.google.cloud.visionai.v1.ListAssetsRequest.getDefaultInstance()) return this;
      // Scalar-field merge semantics: a non-default value in `other` overwrites this
      // builder's value and marks the corresponding presence bit.
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.getPageSize() != 0) {
        setPageSize(other.getPageSize());
      }
      if (!other.getPageToken().isEmpty()) {
        pageToken_ = other.pageToken_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      if (!other.getFilter().isEmpty()) {
        filter_ = other.filter_;
        bitField0_ |= 0x00000008;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
@java.lang.Override
public final boolean isInitialized() {
return true;
}
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        // Wire-format parse loop: dispatch on each tag (field number << 3 | wire type),
        // e.g. 10 = field 1/length-delimited, 16 = field 2/varint, 26 = field 3, 42 = field 5.
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 16:
              {
                pageSize_ = input.readInt32();
                bitField0_ |= 0x00000002;
                break;
              } // case 16
            case 26:
              {
                pageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            case 42:
              {
                filter_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000008;
                break;
              } // case 42
            default:
              {
                // Unknown tags are preserved as unknown fields; a false return means the
                // tag terminated a group, which also ends this message.
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        // Notify parents of the mutation even if parsing failed partway through.
        onChanged();
      } // finally
      return this;
    }
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The parent, which owns this collection of assets.
* Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of assets.
* Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of assets.
* Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of assets.
* Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The parent, which owns this collection of assets.
* Format:
* `projects/{project_number}/locations/{location}/corpora/{corpus}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* The maximum number of assets to return. The service may return fewer than
* this value.
* If unspecified, at most 50 assets will be returned.
* The maximum value is 1000; values above 1000 will be coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* The maximum number of assets to return. The service may return fewer than
* this value.
* If unspecified, at most 50 assets will be returned.
* The maximum value is 1000; values above 1000 will be coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* The maximum number of assets to return. The service may return fewer than
* this value.
* If unspecified, at most 50 assets will be returned.
* The maximum value is 1000; values above 1000 will be coerced to 1000.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token, received from a previous `ListAssets` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListAssets` must match
* the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A page token, received from a previous `ListAssets` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListAssets` must match
* the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A page token, received from a previous `ListAssets` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListAssets` must match
* the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A page token, received from a previous `ListAssets` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListAssets` must match
* the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* A page token, received from a previous `ListAssets` call.
* Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to `ListAssets` must match
* the call that provided the page token.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
private java.lang.Object filter_ = "";
/**
*
*
* <pre>
* The filter applied to the returned list.
* Only the following filterings are supported:
* "assets_with_contents = true", which returns assets with contents uploaded;
* "assets_with_contents = false", which returns assets without contents.
* </pre>
*
* <code>string filter = 5;</code>
*
* @return The filter.
*/
public java.lang.String getFilter() {
java.lang.Object ref = filter_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
filter_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The filter applied to the returned list.
* Only the following filterings are supported:
* "assets_with_contents = true", which returns assets with contents uploaded;
* "assets_with_contents = false", which returns assets without contents.
* </pre>
*
* <code>string filter = 5;</code>
*
* @return The bytes for filter.
*/
public com.google.protobuf.ByteString getFilterBytes() {
java.lang.Object ref = filter_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
filter_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The filter applied to the returned list.
* Only the following filterings are supported:
* "assets_with_contents = true", which returns assets with contents uploaded;
* "assets_with_contents = false", which returns assets without contents.
* </pre>
*
* <code>string filter = 5;</code>
*
* @param value The filter to set.
* @return This builder for chaining.
*/
public Builder setFilter(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* The filter applied to the returned list.
* Only the following filterings are supported:
* "assets_with_contents = true", which returns assets with contents uploaded;
* "assets_with_contents = false", which returns assets without contents.
* </pre>
*
* <code>string filter = 5;</code>
*
* @return This builder for chaining.
*/
public Builder clearFilter() {
filter_ = getDefaultInstance().getFilter();
bitField0_ = (bitField0_ & ~0x00000008);
onChanged();
return this;
}
/**
*
*
* <pre>
* The filter applied to the returned list.
* Only the following filterings are supported:
* "assets_with_contents = true", which returns assets with contents uploaded;
* "assets_with_contents = false", which returns assets without contents.
* </pre>
*
* <code>string filter = 5;</code>
*
* @param value The bytes for filter to set.
* @return This builder for chaining.
*/
public Builder setFilterBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
filter_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.visionai.v1.ListAssetsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.visionai.v1.ListAssetsRequest)
private static final com.google.cloud.visionai.v1.ListAssetsRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.visionai.v1.ListAssetsRequest();
}
public static com.google.cloud.visionai.v1.ListAssetsRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListAssetsRequest> PARSER =
new com.google.protobuf.AbstractParser<ListAssetsRequest>() {
@java.lang.Override
public ListAssetsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListAssetsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListAssetsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.visionai.v1.ListAssetsRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,906 | java-bigqueryconnection/proto-google-cloud-bigqueryconnection-v1/src/main/java/com/google/cloud/bigquery/connection/v1/ListConnectionsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/connection/v1/connection.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.connection.v1;
/**
*
*
* <pre>
* The response for
* [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections].
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.connection.v1.ListConnectionsResponse}
*/
public final class ListConnectionsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.connection.v1.ListConnectionsResponse)
ListConnectionsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListConnectionsResponse.newBuilder() to construct.
private ListConnectionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListConnectionsResponse() {
nextPageToken_ = "";
connections_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListConnectionsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.connection.v1.ConnectionOuterClass
.internal_static_google_cloud_bigquery_connection_v1_ListConnectionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.connection.v1.ConnectionOuterClass
.internal_static_google_cloud_bigquery_connection_v1_ListConnectionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.connection.v1.ListConnectionsResponse.class,
com.google.cloud.bigquery.connection.v1.ListConnectionsResponse.Builder.class);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Next page token.
* </pre>
*
* <code>string next_page_token = 1;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* Next page token.
* </pre>
*
* <code>string next_page_token = 1;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int CONNECTIONS_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.bigquery.connection.v1.Connection> connections_;
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.bigquery.connection.v1.Connection> getConnectionsList() {
return connections_;
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.bigquery.connection.v1.ConnectionOrBuilder>
getConnectionsOrBuilderList() {
return connections_;
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
@java.lang.Override
public int getConnectionsCount() {
return connections_.size();
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
@java.lang.Override
public com.google.cloud.bigquery.connection.v1.Connection getConnections(int index) {
return connections_.get(index);
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
@java.lang.Override
public com.google.cloud.bigquery.connection.v1.ConnectionOrBuilder getConnectionsOrBuilder(
int index) {
return connections_.get(index);
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, nextPageToken_);
}
for (int i = 0; i < connections_.size(); i++) {
output.writeMessage(2, connections_.get(i));
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, nextPageToken_);
}
for (int i = 0; i < connections_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, connections_.get(i));
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.bigquery.connection.v1.ListConnectionsResponse)) {
return super.equals(obj);
}
com.google.cloud.bigquery.connection.v1.ListConnectionsResponse other =
(com.google.cloud.bigquery.connection.v1.ListConnectionsResponse) obj;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getConnectionsList().equals(other.getConnectionsList())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
if (getConnectionsCount() > 0) {
hash = (37 * hash) + CONNECTIONS_FIELD_NUMBER;
hash = (53 * hash) + getConnectionsList().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.bigquery.connection.v1.ListConnectionsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The response for
* [ConnectionService.ListConnections][google.cloud.bigquery.connection.v1.ConnectionService.ListConnections].
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.connection.v1.ListConnectionsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.connection.v1.ListConnectionsResponse)
com.google.cloud.bigquery.connection.v1.ListConnectionsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.connection.v1.ConnectionOuterClass
.internal_static_google_cloud_bigquery_connection_v1_ListConnectionsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.connection.v1.ConnectionOuterClass
.internal_static_google_cloud_bigquery_connection_v1_ListConnectionsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.connection.v1.ListConnectionsResponse.class,
com.google.cloud.bigquery.connection.v1.ListConnectionsResponse.Builder.class);
}
// Construct using com.google.cloud.bigquery.connection.v1.ListConnectionsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
nextPageToken_ = "";
if (connectionsBuilder_ == null) {
connections_ = java.util.Collections.emptyList();
} else {
connections_ = null;
connectionsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.connection.v1.ConnectionOuterClass
.internal_static_google_cloud_bigquery_connection_v1_ListConnectionsResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.connection.v1.ListConnectionsResponse
getDefaultInstanceForType() {
return com.google.cloud.bigquery.connection.v1.ListConnectionsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.connection.v1.ListConnectionsResponse build() {
com.google.cloud.bigquery.connection.v1.ListConnectionsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.bigquery.connection.v1.ListConnectionsResponse buildPartial() {
com.google.cloud.bigquery.connection.v1.ListConnectionsResponse result =
new com.google.cloud.bigquery.connection.v1.ListConnectionsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.bigquery.connection.v1.ListConnectionsResponse result) {
if (connectionsBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
connections_ = java.util.Collections.unmodifiableList(connections_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.connections_ = connections_;
} else {
result.connections_ = connectionsBuilder_.build();
}
}
private void buildPartial0(
com.google.cloud.bigquery.connection.v1.ListConnectionsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.connection.v1.ListConnectionsResponse) {
return mergeFrom((com.google.cloud.bigquery.connection.v1.ListConnectionsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.bigquery.connection.v1.ListConnectionsResponse other) {
if (other
== com.google.cloud.bigquery.connection.v1.ListConnectionsResponse.getDefaultInstance())
return this;
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000001;
onChanged();
}
if (connectionsBuilder_ == null) {
if (!other.connections_.isEmpty()) {
if (connections_.isEmpty()) {
connections_ = other.connections_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureConnectionsIsMutable();
connections_.addAll(other.connections_);
}
onChanged();
}
} else {
if (!other.connections_.isEmpty()) {
if (connectionsBuilder_.isEmpty()) {
connectionsBuilder_.dispose();
connectionsBuilder_ = null;
connections_ = other.connections_;
bitField0_ = (bitField0_ & ~0x00000002);
connectionsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getConnectionsFieldBuilder()
: null;
} else {
connectionsBuilder_.addAllMessages(other.connections_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
com.google.cloud.bigquery.connection.v1.Connection m =
input.readMessage(
com.google.cloud.bigquery.connection.v1.Connection.parser(),
extensionRegistry);
if (connectionsBuilder_ == null) {
ensureConnectionsIsMutable();
connections_.add(m);
} else {
connectionsBuilder_.addMessage(m);
}
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Next page token.
* </pre>
*
* <code>string next_page_token = 1;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Next page token.
* </pre>
*
* <code>string next_page_token = 1;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Next page token.
* </pre>
*
* <code>string next_page_token = 1;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Next page token.
* </pre>
*
* <code>string next_page_token = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Next page token.
* </pre>
*
* <code>string next_page_token = 1;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.util.List<com.google.cloud.bigquery.connection.v1.Connection> connections_ =
java.util.Collections.emptyList();
private void ensureConnectionsIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
connections_ =
new java.util.ArrayList<com.google.cloud.bigquery.connection.v1.Connection>(
connections_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.bigquery.connection.v1.Connection,
com.google.cloud.bigquery.connection.v1.Connection.Builder,
com.google.cloud.bigquery.connection.v1.ConnectionOrBuilder>
connectionsBuilder_;
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public java.util.List<com.google.cloud.bigquery.connection.v1.Connection> getConnectionsList() {
if (connectionsBuilder_ == null) {
return java.util.Collections.unmodifiableList(connections_);
} else {
return connectionsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public int getConnectionsCount() {
if (connectionsBuilder_ == null) {
return connections_.size();
} else {
return connectionsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public com.google.cloud.bigquery.connection.v1.Connection getConnections(int index) {
if (connectionsBuilder_ == null) {
return connections_.get(index);
} else {
return connectionsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public Builder setConnections(
int index, com.google.cloud.bigquery.connection.v1.Connection value) {
if (connectionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConnectionsIsMutable();
connections_.set(index, value);
onChanged();
} else {
connectionsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public Builder setConnections(
int index, com.google.cloud.bigquery.connection.v1.Connection.Builder builderForValue) {
if (connectionsBuilder_ == null) {
ensureConnectionsIsMutable();
connections_.set(index, builderForValue.build());
onChanged();
} else {
connectionsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public Builder addConnections(com.google.cloud.bigquery.connection.v1.Connection value) {
if (connectionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConnectionsIsMutable();
connections_.add(value);
onChanged();
} else {
connectionsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public Builder addConnections(
int index, com.google.cloud.bigquery.connection.v1.Connection value) {
if (connectionsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureConnectionsIsMutable();
connections_.add(index, value);
onChanged();
} else {
connectionsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public Builder addConnections(
com.google.cloud.bigquery.connection.v1.Connection.Builder builderForValue) {
if (connectionsBuilder_ == null) {
ensureConnectionsIsMutable();
connections_.add(builderForValue.build());
onChanged();
} else {
connectionsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public Builder addConnections(
int index, com.google.cloud.bigquery.connection.v1.Connection.Builder builderForValue) {
if (connectionsBuilder_ == null) {
ensureConnectionsIsMutable();
connections_.add(index, builderForValue.build());
onChanged();
} else {
connectionsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public Builder addAllConnections(
java.lang.Iterable<? extends com.google.cloud.bigquery.connection.v1.Connection> values) {
if (connectionsBuilder_ == null) {
ensureConnectionsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, connections_);
onChanged();
} else {
connectionsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public Builder clearConnections() {
if (connectionsBuilder_ == null) {
connections_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
connectionsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public Builder removeConnections(int index) {
if (connectionsBuilder_ == null) {
ensureConnectionsIsMutable();
connections_.remove(index);
onChanged();
} else {
connectionsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public com.google.cloud.bigquery.connection.v1.Connection.Builder getConnectionsBuilder(
int index) {
return getConnectionsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public com.google.cloud.bigquery.connection.v1.ConnectionOrBuilder getConnectionsOrBuilder(
int index) {
if (connectionsBuilder_ == null) {
return connections_.get(index);
} else {
return connectionsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public java.util.List<? extends com.google.cloud.bigquery.connection.v1.ConnectionOrBuilder>
getConnectionsOrBuilderList() {
if (connectionsBuilder_ != null) {
return connectionsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(connections_);
}
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public com.google.cloud.bigquery.connection.v1.Connection.Builder addConnectionsBuilder() {
return getConnectionsFieldBuilder()
.addBuilder(com.google.cloud.bigquery.connection.v1.Connection.getDefaultInstance());
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public com.google.cloud.bigquery.connection.v1.Connection.Builder addConnectionsBuilder(
int index) {
return getConnectionsFieldBuilder()
.addBuilder(
index, com.google.cloud.bigquery.connection.v1.Connection.getDefaultInstance());
}
/**
*
*
* <pre>
* List of connections.
* </pre>
*
* <code>repeated .google.cloud.bigquery.connection.v1.Connection connections = 2;</code>
*/
public java.util.List<com.google.cloud.bigquery.connection.v1.Connection.Builder>
getConnectionsBuilderList() {
return getConnectionsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.bigquery.connection.v1.Connection,
com.google.cloud.bigquery.connection.v1.Connection.Builder,
com.google.cloud.bigquery.connection.v1.ConnectionOrBuilder>
getConnectionsFieldBuilder() {
if (connectionsBuilder_ == null) {
connectionsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.bigquery.connection.v1.Connection,
com.google.cloud.bigquery.connection.v1.Connection.Builder,
com.google.cloud.bigquery.connection.v1.ConnectionOrBuilder>(
connections_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
connections_ = null;
}
return connectionsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.connection.v1.ListConnectionsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.connection.v1.ListConnectionsResponse)
private static final com.google.cloud.bigquery.connection.v1.ListConnectionsResponse
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.bigquery.connection.v1.ListConnectionsResponse();
}
public static com.google.cloud.bigquery.connection.v1.ListConnectionsResponse
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListConnectionsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListConnectionsResponse>() {
@java.lang.Override
public ListConnectionsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListConnectionsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListConnectionsResponse> getParserForType() {
  // Instance accessor required by the Message interface; same singleton as parser().
  return PARSER;
}
@java.lang.Override
public com.google.cloud.bigquery.connection.v1.ListConnectionsResponse
    getDefaultInstanceForType() {
  // Instance accessor required by the MessageLite interface; same singleton as
  // getDefaultInstance().
  return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,171 | java-cloudcontrolspartner/google-cloud-cloudcontrolspartner/src/main/java/com/google/cloud/cloudcontrolspartner/v1beta/stub/CloudControlsPartnerCoreStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.cloudcontrolspartner.v1beta.stub;
import static com.google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCoreClient.ListAccessApprovalRequestsPagedResponse;
import static com.google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCoreClient.ListCustomersPagedResponse;
import static com.google.cloud.cloudcontrolspartner.v1beta.CloudControlsPartnerCoreClient.ListWorkloadsPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.BetaApi;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.GrpcTransportChannel;
import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.cloudcontrolspartner.v1beta.AccessApprovalRequest;
import com.google.cloud.cloudcontrolspartner.v1beta.CreateCustomerRequest;
import com.google.cloud.cloudcontrolspartner.v1beta.Customer;
import com.google.cloud.cloudcontrolspartner.v1beta.DeleteCustomerRequest;
import com.google.cloud.cloudcontrolspartner.v1beta.EkmConnections;
import com.google.cloud.cloudcontrolspartner.v1beta.GetCustomerRequest;
import com.google.cloud.cloudcontrolspartner.v1beta.GetEkmConnectionsRequest;
import com.google.cloud.cloudcontrolspartner.v1beta.GetPartnerPermissionsRequest;
import com.google.cloud.cloudcontrolspartner.v1beta.GetPartnerRequest;
import com.google.cloud.cloudcontrolspartner.v1beta.GetWorkloadRequest;
import com.google.cloud.cloudcontrolspartner.v1beta.ListAccessApprovalRequestsRequest;
import com.google.cloud.cloudcontrolspartner.v1beta.ListAccessApprovalRequestsResponse;
import com.google.cloud.cloudcontrolspartner.v1beta.ListCustomersRequest;
import com.google.cloud.cloudcontrolspartner.v1beta.ListCustomersResponse;
import com.google.cloud.cloudcontrolspartner.v1beta.ListWorkloadsRequest;
import com.google.cloud.cloudcontrolspartner.v1beta.ListWorkloadsResponse;
import com.google.cloud.cloudcontrolspartner.v1beta.Partner;
import com.google.cloud.cloudcontrolspartner.v1beta.PartnerPermissions;
import com.google.cloud.cloudcontrolspartner.v1beta.UpdateCustomerRequest;
import com.google.cloud.cloudcontrolspartner.v1beta.Workload;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import com.google.protobuf.Empty;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link CloudControlsPartnerCoreStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (cloudcontrolspartner.googleapis.com) and default port (443)
* are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of getWorkload:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* CloudControlsPartnerCoreStubSettings.Builder cloudControlsPartnerCoreSettingsBuilder =
* CloudControlsPartnerCoreStubSettings.newBuilder();
* cloudControlsPartnerCoreSettingsBuilder
* .getWorkloadSettings()
* .setRetrySettings(
* cloudControlsPartnerCoreSettingsBuilder
* .getWorkloadSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* CloudControlsPartnerCoreStubSettings cloudControlsPartnerCoreSettings =
* cloudControlsPartnerCoreSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*/
@BetaApi
@Generated("by gapic-generator-java")
public class CloudControlsPartnerCoreStubSettings
extends StubSettings<CloudControlsPartnerCoreStubSettings> {
/** The default scopes of the service. */
private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
ImmutableList.<String>builder().add("https://www.googleapis.com/auth/cloud-platform").build();
private final UnaryCallSettings<GetWorkloadRequest, Workload> getWorkloadSettings;
private final PagedCallSettings<
ListWorkloadsRequest, ListWorkloadsResponse, ListWorkloadsPagedResponse>
listWorkloadsSettings;
private final UnaryCallSettings<GetCustomerRequest, Customer> getCustomerSettings;
private final PagedCallSettings<
ListCustomersRequest, ListCustomersResponse, ListCustomersPagedResponse>
listCustomersSettings;
private final UnaryCallSettings<GetEkmConnectionsRequest, EkmConnections>
getEkmConnectionsSettings;
private final UnaryCallSettings<GetPartnerPermissionsRequest, PartnerPermissions>
getPartnerPermissionsSettings;
private final PagedCallSettings<
ListAccessApprovalRequestsRequest,
ListAccessApprovalRequestsResponse,
ListAccessApprovalRequestsPagedResponse>
listAccessApprovalRequestsSettings;
private final UnaryCallSettings<GetPartnerRequest, Partner> getPartnerSettings;
private final UnaryCallSettings<CreateCustomerRequest, Customer> createCustomerSettings;
private final UnaryCallSettings<UpdateCustomerRequest, Customer> updateCustomerSettings;
private final UnaryCallSettings<DeleteCustomerRequest, Empty> deleteCustomerSettings;
private static final PagedListDescriptor<ListWorkloadsRequest, ListWorkloadsResponse, Workload>
LIST_WORKLOADS_PAGE_STR_DESC =
new PagedListDescriptor<ListWorkloadsRequest, ListWorkloadsResponse, Workload>() {
@Override
public String emptyToken() {
return "";
}
@Override
public ListWorkloadsRequest injectToken(ListWorkloadsRequest payload, String token) {
return ListWorkloadsRequest.newBuilder(payload).setPageToken(token).build();
}
@Override
public ListWorkloadsRequest injectPageSize(ListWorkloadsRequest payload, int pageSize) {
return ListWorkloadsRequest.newBuilder(payload).setPageSize(pageSize).build();
}
@Override
public Integer extractPageSize(ListWorkloadsRequest payload) {
return payload.getPageSize();
}
@Override
public String extractNextToken(ListWorkloadsResponse payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<Workload> extractResources(ListWorkloadsResponse payload) {
return payload.getWorkloadsList();
}
};
private static final PagedListDescriptor<ListCustomersRequest, ListCustomersResponse, Customer>
LIST_CUSTOMERS_PAGE_STR_DESC =
new PagedListDescriptor<ListCustomersRequest, ListCustomersResponse, Customer>() {
@Override
public String emptyToken() {
return "";
}
@Override
public ListCustomersRequest injectToken(ListCustomersRequest payload, String token) {
return ListCustomersRequest.newBuilder(payload).setPageToken(token).build();
}
@Override
public ListCustomersRequest injectPageSize(ListCustomersRequest payload, int pageSize) {
return ListCustomersRequest.newBuilder(payload).setPageSize(pageSize).build();
}
@Override
public Integer extractPageSize(ListCustomersRequest payload) {
return payload.getPageSize();
}
@Override
public String extractNextToken(ListCustomersResponse payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<Customer> extractResources(ListCustomersResponse payload) {
return payload.getCustomersList();
}
};
private static final PagedListDescriptor<
ListAccessApprovalRequestsRequest,
ListAccessApprovalRequestsResponse,
AccessApprovalRequest>
LIST_ACCESS_APPROVAL_REQUESTS_PAGE_STR_DESC =
new PagedListDescriptor<
ListAccessApprovalRequestsRequest,
ListAccessApprovalRequestsResponse,
AccessApprovalRequest>() {
@Override
public String emptyToken() {
return "";
}
@Override
public ListAccessApprovalRequestsRequest injectToken(
ListAccessApprovalRequestsRequest payload, String token) {
return ListAccessApprovalRequestsRequest.newBuilder(payload)
.setPageToken(token)
.build();
}
@Override
public ListAccessApprovalRequestsRequest injectPageSize(
ListAccessApprovalRequestsRequest payload, int pageSize) {
return ListAccessApprovalRequestsRequest.newBuilder(payload)
.setPageSize(pageSize)
.build();
}
@Override
public Integer extractPageSize(ListAccessApprovalRequestsRequest payload) {
return payload.getPageSize();
}
@Override
public String extractNextToken(ListAccessApprovalRequestsResponse payload) {
return payload.getNextPageToken();
}
@Override
public Iterable<AccessApprovalRequest> extractResources(
ListAccessApprovalRequestsResponse payload) {
return payload.getAccessApprovalRequestsList();
}
};
private static final PagedListResponseFactory<
ListWorkloadsRequest, ListWorkloadsResponse, ListWorkloadsPagedResponse>
LIST_WORKLOADS_PAGE_STR_FACT =
new PagedListResponseFactory<
ListWorkloadsRequest, ListWorkloadsResponse, ListWorkloadsPagedResponse>() {
@Override
public ApiFuture<ListWorkloadsPagedResponse> getFuturePagedResponse(
UnaryCallable<ListWorkloadsRequest, ListWorkloadsResponse> callable,
ListWorkloadsRequest request,
ApiCallContext context,
ApiFuture<ListWorkloadsResponse> futureResponse) {
PageContext<ListWorkloadsRequest, ListWorkloadsResponse, Workload> pageContext =
PageContext.create(callable, LIST_WORKLOADS_PAGE_STR_DESC, request, context);
return ListWorkloadsPagedResponse.createAsync(pageContext, futureResponse);
}
};
private static final PagedListResponseFactory<
ListCustomersRequest, ListCustomersResponse, ListCustomersPagedResponse>
LIST_CUSTOMERS_PAGE_STR_FACT =
new PagedListResponseFactory<
ListCustomersRequest, ListCustomersResponse, ListCustomersPagedResponse>() {
@Override
public ApiFuture<ListCustomersPagedResponse> getFuturePagedResponse(
UnaryCallable<ListCustomersRequest, ListCustomersResponse> callable,
ListCustomersRequest request,
ApiCallContext context,
ApiFuture<ListCustomersResponse> futureResponse) {
PageContext<ListCustomersRequest, ListCustomersResponse, Customer> pageContext =
PageContext.create(callable, LIST_CUSTOMERS_PAGE_STR_DESC, request, context);
return ListCustomersPagedResponse.createAsync(pageContext, futureResponse);
}
};
private static final PagedListResponseFactory<
ListAccessApprovalRequestsRequest,
ListAccessApprovalRequestsResponse,
ListAccessApprovalRequestsPagedResponse>
LIST_ACCESS_APPROVAL_REQUESTS_PAGE_STR_FACT =
new PagedListResponseFactory<
ListAccessApprovalRequestsRequest,
ListAccessApprovalRequestsResponse,
ListAccessApprovalRequestsPagedResponse>() {
@Override
public ApiFuture<ListAccessApprovalRequestsPagedResponse> getFuturePagedResponse(
UnaryCallable<ListAccessApprovalRequestsRequest, ListAccessApprovalRequestsResponse>
callable,
ListAccessApprovalRequestsRequest request,
ApiCallContext context,
ApiFuture<ListAccessApprovalRequestsResponse> futureResponse) {
PageContext<
ListAccessApprovalRequestsRequest,
ListAccessApprovalRequestsResponse,
AccessApprovalRequest>
pageContext =
PageContext.create(
callable, LIST_ACCESS_APPROVAL_REQUESTS_PAGE_STR_DESC, request, context);
return ListAccessApprovalRequestsPagedResponse.createAsync(
pageContext, futureResponse);
}
};
/** Returns the object with the settings used for calls to getWorkload. */
public UnaryCallSettings<GetWorkloadRequest, Workload> getWorkloadSettings() {
return getWorkloadSettings;
}
/** Returns the object with the settings used for calls to listWorkloads. */
public PagedCallSettings<ListWorkloadsRequest, ListWorkloadsResponse, ListWorkloadsPagedResponse>
listWorkloadsSettings() {
return listWorkloadsSettings;
}
/** Returns the object with the settings used for calls to getCustomer. */
public UnaryCallSettings<GetCustomerRequest, Customer> getCustomerSettings() {
return getCustomerSettings;
}
/** Returns the object with the settings used for calls to listCustomers. */
public PagedCallSettings<ListCustomersRequest, ListCustomersResponse, ListCustomersPagedResponse>
listCustomersSettings() {
return listCustomersSettings;
}
/** Returns the object with the settings used for calls to getEkmConnections. */
public UnaryCallSettings<GetEkmConnectionsRequest, EkmConnections> getEkmConnectionsSettings() {
return getEkmConnectionsSettings;
}
/** Returns the object with the settings used for calls to getPartnerPermissions. */
public UnaryCallSettings<GetPartnerPermissionsRequest, PartnerPermissions>
getPartnerPermissionsSettings() {
return getPartnerPermissionsSettings;
}
/**
* Returns the object with the settings used for calls to listAccessApprovalRequests.
*
* @deprecated This method is deprecated and will be removed in the next major version update.
*/
@Deprecated
public PagedCallSettings<
ListAccessApprovalRequestsRequest,
ListAccessApprovalRequestsResponse,
ListAccessApprovalRequestsPagedResponse>
listAccessApprovalRequestsSettings() {
return listAccessApprovalRequestsSettings;
}
/** Returns the object with the settings used for calls to getPartner. */
public UnaryCallSettings<GetPartnerRequest, Partner> getPartnerSettings() {
return getPartnerSettings;
}
/** Returns the object with the settings used for calls to createCustomer. */
public UnaryCallSettings<CreateCustomerRequest, Customer> createCustomerSettings() {
return createCustomerSettings;
}
/** Returns the object with the settings used for calls to updateCustomer. */
public UnaryCallSettings<UpdateCustomerRequest, Customer> updateCustomerSettings() {
return updateCustomerSettings;
}
/** Returns the object with the settings used for calls to deleteCustomer. */
public UnaryCallSettings<DeleteCustomerRequest, Empty> deleteCustomerSettings() {
return deleteCustomerSettings;
}
public CloudControlsPartnerCoreStub createStub() throws IOException {
if (getTransportChannelProvider()
.getTransportName()
.equals(GrpcTransportChannel.getGrpcTransportName())) {
return GrpcCloudControlsPartnerCoreStub.create(this);
}
if (getTransportChannelProvider()
.getTransportName()
.equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
return HttpJsonCloudControlsPartnerCoreStub.create(this);
}
throw new UnsupportedOperationException(
String.format(
"Transport not supported: %s", getTransportChannelProvider().getTransportName()));
}
/** Returns the default service name. */
@Override
public String getServiceName() {
return "cloudcontrolspartner";
}
/** Returns a builder for the default ExecutorProvider for this service. */
public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
return InstantiatingExecutorProvider.newBuilder();
}
/** Returns the default service endpoint. */
@ObsoleteApi("Use getEndpoint() instead")
public static String getDefaultEndpoint() {
return "cloudcontrolspartner.googleapis.com:443";
}
/** Returns the default mTLS service endpoint. */
public static String getDefaultMtlsEndpoint() {
return "cloudcontrolspartner.mtls.googleapis.com:443";
}
/** Returns the default service scopes. */
public static List<String> getDefaultServiceScopes() {
return DEFAULT_SERVICE_SCOPES;
}
/** Returns a builder for the default credentials for this service. */
public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
return GoogleCredentialsProvider.newBuilder()
.setScopesToApply(DEFAULT_SERVICE_SCOPES)
.setUseJwtAccessWithScope(true);
}
/** Returns a builder for the default gRPC ChannelProvider for this service. */
public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() {
return InstantiatingGrpcChannelProvider.newBuilder()
.setMaxInboundMessageSize(Integer.MAX_VALUE);
}
/** Returns a builder for the default REST ChannelProvider for this service. */
@BetaApi
public static InstantiatingHttpJsonChannelProvider.Builder
defaultHttpJsonTransportProviderBuilder() {
return InstantiatingHttpJsonChannelProvider.newBuilder();
}
public static TransportChannelProvider defaultTransportChannelProvider() {
return defaultGrpcTransportProviderBuilder().build();
}
public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() {
return ApiClientHeaderProvider.newBuilder()
.setGeneratedLibToken(
"gapic", GaxProperties.getLibraryVersion(CloudControlsPartnerCoreStubSettings.class))
.setTransportToken(
GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion());
}
public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() {
return ApiClientHeaderProvider.newBuilder()
.setGeneratedLibToken(
"gapic", GaxProperties.getLibraryVersion(CloudControlsPartnerCoreStubSettings.class))
.setTransportToken(
GaxHttpJsonProperties.getHttpJsonTokenName(),
GaxHttpJsonProperties.getHttpJsonVersion());
}
public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
return CloudControlsPartnerCoreStubSettings.defaultGrpcApiClientHeaderProviderBuilder();
}
/** Returns a new gRPC builder for this class. */
public static Builder newBuilder() {
return Builder.createDefault();
}
/** Returns a new REST builder for this class. */
public static Builder newHttpJsonBuilder() {
return Builder.createHttpJsonDefault();
}
/** Returns a new builder for this class. */
public static Builder newBuilder(ClientContext clientContext) {
return new Builder(clientContext);
}
/** Returns a builder containing all the values of this settings class. */
public Builder toBuilder() {
return new Builder(this);
}
protected CloudControlsPartnerCoreStubSettings(Builder settingsBuilder) throws IOException {
super(settingsBuilder);
getWorkloadSettings = settingsBuilder.getWorkloadSettings().build();
listWorkloadsSettings = settingsBuilder.listWorkloadsSettings().build();
getCustomerSettings = settingsBuilder.getCustomerSettings().build();
listCustomersSettings = settingsBuilder.listCustomersSettings().build();
getEkmConnectionsSettings = settingsBuilder.getEkmConnectionsSettings().build();
getPartnerPermissionsSettings = settingsBuilder.getPartnerPermissionsSettings().build();
listAccessApprovalRequestsSettings =
settingsBuilder.listAccessApprovalRequestsSettings().build();
getPartnerSettings = settingsBuilder.getPartnerSettings().build();
createCustomerSettings = settingsBuilder.createCustomerSettings().build();
updateCustomerSettings = settingsBuilder.updateCustomerSettings().build();
deleteCustomerSettings = settingsBuilder.deleteCustomerSettings().build();
}
/** Builder for CloudControlsPartnerCoreStubSettings. */
public static class Builder
extends StubSettings.Builder<CloudControlsPartnerCoreStubSettings, Builder> {
private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
private final UnaryCallSettings.Builder<GetWorkloadRequest, Workload> getWorkloadSettings;
private final PagedCallSettings.Builder<
ListWorkloadsRequest, ListWorkloadsResponse, ListWorkloadsPagedResponse>
listWorkloadsSettings;
private final UnaryCallSettings.Builder<GetCustomerRequest, Customer> getCustomerSettings;
private final PagedCallSettings.Builder<
ListCustomersRequest, ListCustomersResponse, ListCustomersPagedResponse>
listCustomersSettings;
private final UnaryCallSettings.Builder<GetEkmConnectionsRequest, EkmConnections>
getEkmConnectionsSettings;
private final UnaryCallSettings.Builder<GetPartnerPermissionsRequest, PartnerPermissions>
getPartnerPermissionsSettings;
private final PagedCallSettings.Builder<
ListAccessApprovalRequestsRequest,
ListAccessApprovalRequestsResponse,
ListAccessApprovalRequestsPagedResponse>
listAccessApprovalRequestsSettings;
private final UnaryCallSettings.Builder<GetPartnerRequest, Partner> getPartnerSettings;
private final UnaryCallSettings.Builder<CreateCustomerRequest, Customer> createCustomerSettings;
private final UnaryCallSettings.Builder<UpdateCustomerRequest, Customer> updateCustomerSettings;
private final UnaryCallSettings.Builder<DeleteCustomerRequest, Empty> deleteCustomerSettings;
private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
RETRYABLE_CODE_DEFINITIONS;
static {
ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
ImmutableMap.builder();
definitions.put(
"retry_policy_0_codes",
ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE)));
definitions.put("no_retry_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
RETRYABLE_CODE_DEFINITIONS = definitions.build();
}
private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;
static {
ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
RetrySettings settings = null;
settings =
RetrySettings.newBuilder()
.setInitialRetryDelayDuration(Duration.ofMillis(1000L))
.setRetryDelayMultiplier(1.3)
.setMaxRetryDelayDuration(Duration.ofMillis(10000L))
.setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
.setRpcTimeoutMultiplier(1.0)
.setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
.setTotalTimeoutDuration(Duration.ofMillis(60000L))
.build();
definitions.put("retry_policy_0_params", settings);
settings = RetrySettings.newBuilder().setRpcTimeoutMultiplier(1.0).build();
definitions.put("no_retry_params", settings);
RETRY_PARAM_DEFINITIONS = definitions.build();
}
protected Builder() {
this(((ClientContext) null));
}
protected Builder(ClientContext clientContext) {
super(clientContext);
getWorkloadSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
listWorkloadsSettings = PagedCallSettings.newBuilder(LIST_WORKLOADS_PAGE_STR_FACT);
getCustomerSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
listCustomersSettings = PagedCallSettings.newBuilder(LIST_CUSTOMERS_PAGE_STR_FACT);
getEkmConnectionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
getPartnerPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
listAccessApprovalRequestsSettings =
PagedCallSettings.newBuilder(LIST_ACCESS_APPROVAL_REQUESTS_PAGE_STR_FACT);
getPartnerSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
createCustomerSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
updateCustomerSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
deleteCustomerSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
unaryMethodSettingsBuilders =
ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
getWorkloadSettings,
listWorkloadsSettings,
getCustomerSettings,
listCustomersSettings,
getEkmConnectionsSettings,
getPartnerPermissionsSettings,
listAccessApprovalRequestsSettings,
getPartnerSettings,
createCustomerSettings,
updateCustomerSettings,
deleteCustomerSettings);
initDefaults(this);
}
protected Builder(CloudControlsPartnerCoreStubSettings settings) {
super(settings);
getWorkloadSettings = settings.getWorkloadSettings.toBuilder();
listWorkloadsSettings = settings.listWorkloadsSettings.toBuilder();
getCustomerSettings = settings.getCustomerSettings.toBuilder();
listCustomersSettings = settings.listCustomersSettings.toBuilder();
getEkmConnectionsSettings = settings.getEkmConnectionsSettings.toBuilder();
getPartnerPermissionsSettings = settings.getPartnerPermissionsSettings.toBuilder();
listAccessApprovalRequestsSettings = settings.listAccessApprovalRequestsSettings.toBuilder();
getPartnerSettings = settings.getPartnerSettings.toBuilder();
createCustomerSettings = settings.createCustomerSettings.toBuilder();
updateCustomerSettings = settings.updateCustomerSettings.toBuilder();
deleteCustomerSettings = settings.deleteCustomerSettings.toBuilder();
unaryMethodSettingsBuilders =
ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
getWorkloadSettings,
listWorkloadsSettings,
getCustomerSettings,
listCustomersSettings,
getEkmConnectionsSettings,
getPartnerPermissionsSettings,
listAccessApprovalRequestsSettings,
getPartnerSettings,
createCustomerSettings,
updateCustomerSettings,
deleteCustomerSettings);
}
private static Builder createDefault() {
Builder builder = new Builder(((ClientContext) null));
builder.setTransportChannelProvider(defaultTransportChannelProvider());
builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
builder.setSwitchToMtlsEndpointAllowed(true);
return initDefaults(builder);
}
private static Builder createHttpJsonDefault() {
Builder builder = new Builder(((ClientContext) null));
builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
builder.setSwitchToMtlsEndpointAllowed(true);
return initDefaults(builder);
}
private static Builder initDefaults(Builder builder) {
builder
.getWorkloadSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.listWorkloadsSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.getCustomerSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.listCustomersSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.getEkmConnectionsSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.getPartnerPermissionsSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.listAccessApprovalRequestsSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
builder
.getPartnerSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
builder
.createCustomerSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
builder
.updateCustomerSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
builder
.deleteCustomerSettings()
.setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_codes"))
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_params"));
return builder;
}
/**
* Applies the given settings updater function to all of the unary API methods in this service.
*
* <p>Note: This method does not support applying settings to streaming methods.
*/
public Builder applyToAllUnaryMethods(
ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
return this;
}
public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
return unaryMethodSettingsBuilders;
}
/** Returns the builder for the settings used for calls to getWorkload. */
public UnaryCallSettings.Builder<GetWorkloadRequest, Workload> getWorkloadSettings() {
return getWorkloadSettings;
}
/** Returns the builder for the settings used for calls to listWorkloads. */
public PagedCallSettings.Builder<
ListWorkloadsRequest, ListWorkloadsResponse, ListWorkloadsPagedResponse>
listWorkloadsSettings() {
return listWorkloadsSettings;
}
/** Returns the builder for the settings used for calls to getCustomer. */
public UnaryCallSettings.Builder<GetCustomerRequest, Customer> getCustomerSettings() {
return getCustomerSettings;
}
/** Returns the builder for the settings used for calls to listCustomers. */
public PagedCallSettings.Builder<
ListCustomersRequest, ListCustomersResponse, ListCustomersPagedResponse>
listCustomersSettings() {
return listCustomersSettings;
}
/** Returns the builder for the settings used for calls to getEkmConnections. */
public UnaryCallSettings.Builder<GetEkmConnectionsRequest, EkmConnections>
getEkmConnectionsSettings() {
return getEkmConnectionsSettings;
}
/** Returns the builder for the settings used for calls to getPartnerPermissions. */
public UnaryCallSettings.Builder<GetPartnerPermissionsRequest, PartnerPermissions>
getPartnerPermissionsSettings() {
return getPartnerPermissionsSettings;
}
    /**
     * Returns the builder for the settings used for calls to listAccessApprovalRequests.
     *
     * @deprecated This method is deprecated and will be removed in the next major version update.
     */
    // Retained only for backward compatibility until the next major version, per the
    // deprecation notice above.
    @Deprecated
    public PagedCallSettings.Builder<
            ListAccessApprovalRequestsRequest,
            ListAccessApprovalRequestsResponse,
            ListAccessApprovalRequestsPagedResponse>
        listAccessApprovalRequestsSettings() {
      return listAccessApprovalRequestsSettings;
    }
    /** Returns the builder for the settings used for calls to getPartner. */
    public UnaryCallSettings.Builder<GetPartnerRequest, Partner> getPartnerSettings() {
      return getPartnerSettings;
    }
    /** Returns the builder for the settings used for calls to createCustomer. */
    public UnaryCallSettings.Builder<CreateCustomerRequest, Customer> createCustomerSettings() {
      return createCustomerSettings;
    }
    /** Returns the builder for the settings used for calls to updateCustomer. */
    public UnaryCallSettings.Builder<UpdateCustomerRequest, Customer> updateCustomerSettings() {
      return updateCustomerSettings;
    }
    /** Returns the builder for the settings used for calls to deleteCustomer. */
    public UnaryCallSettings.Builder<DeleteCustomerRequest, Empty> deleteCustomerSettings() {
      return deleteCustomerSettings;
    }
    /** Creates the stub settings from the current state of this builder. */
    @Override
    public CloudControlsPartnerCoreStubSettings build() throws IOException {
      return new CloudControlsPartnerCoreStubSettings(this);
    }
}
}
|
oracle/nosql | 36,894 | kvmain/src/main/java/oracle/kv/impl/query/runtime/CompOpIter.java | /*-
* Copyright (C) 2011, 2025 Oracle and/or its affiliates. All rights reserved.
*
* This file was distributed by Oracle as part of a version of Oracle NoSQL
* Database made available at:
*
* http://www.oracle.com/technetwork/database/database-technologies/nosqldb/downloads/index.html
*
* Please see the LICENSE file included in the top-level directory of the
* appropriate version of Oracle NoSQL Database for a copy of the license and
* additional information.
*/
package oracle.kv.impl.query.runtime;
import java.io.DataInput;
import java.io.DataOutput;
import java.io.IOException;
import java.util.Arrays;
import java.util.Iterator;
import java.util.Map;
import java.util.Objects;
import oracle.kv.impl.api.table.ArrayValueImpl;
import oracle.kv.impl.api.table.BooleanValueImpl;
import oracle.kv.impl.api.table.DisplayFormatter;
import oracle.kv.impl.api.table.DoubleValueImpl;
import oracle.kv.impl.api.table.EnumDefImpl;
import oracle.kv.impl.api.table.EnumValueImpl;
import oracle.kv.impl.api.table.FieldValueImpl;
import oracle.kv.impl.api.table.FloatValueImpl;
import oracle.kv.impl.api.table.IntegerValueImpl;
import oracle.kv.impl.api.table.LongValueImpl;
import oracle.kv.impl.api.table.MapValueImpl;
import oracle.kv.impl.api.table.NullValueImpl;
import oracle.kv.impl.api.table.RecordDefImpl;
import oracle.kv.impl.api.table.RecordValueImpl;
import oracle.kv.impl.api.table.StringValueImpl;
import oracle.kv.impl.api.table.TimestampDefImpl;
import oracle.kv.impl.api.table.TimestampValueImpl;
import oracle.kv.impl.api.table.TupleValue;
import oracle.kv.impl.query.QueryException;
import oracle.kv.impl.query.QueryStateException;
import oracle.kv.impl.query.QueryException.Location;
import oracle.kv.impl.query.compiler.Expr;
import oracle.kv.impl.query.compiler.FuncCompOp;
import oracle.kv.impl.query.compiler.FunctionLib.FuncCode;
import oracle.kv.impl.query.compiler.SortSpec;
import oracle.kv.impl.query.types.TypeManager;
import oracle.kv.table.FieldDef.Type;
import oracle.kv.table.FieldValue;
import oracle.kv.table.NumberValue;
/**
 * Iterator to implement the comparison operators
 *
 * boolean comp(any*, any*)
 *
 * Returns NULL if any operand returns NULL.
 * Returns true if both operands return zero items and op is eq, ge, or le.
 * Raises an error if either operand returns more than one item.
 * Returns false if exactly one operand returns zero items, except for the
 * != operator, which returns true in that case.
 * Returns false if the items to compare are not comparable.
 */
public class CompOpIter extends PlanIter {
    /*
     * Out-param struct used to return the outcome of a comparison.
     */
    static public class CompResult {
        /* 3-way comparison outcome: 0 if equal; for ordering ops, the sign
         * gives the order; for =/!=, any non-0 value means "not equal". */
        public int comp;
        /* True if the operands cannot be compared with the given operator. */
        public boolean incompatible;
        /* True if either operand was SQL NULL. */
        public boolean haveNull;
        /* Resets all fields to their initial state before a new comparison. */
        void clear() {
            comp = 0;
            incompatible = false;
            haveNull = false;
        }
        @Override
        public String toString() {
            return ("(comp, incompatible, haveNull) = (" +
                comp + ", " + incompatible + ", " + haveNull + ")");
        }
    }
    /*
     * Per-execution state for this iterator: holds the result of the single
     * comparison performed per next() call.
     */
    static private class CompIterState extends PlanIterState {
        final CompResult theResult = new CompResult();
        @Override
        public void reset(PlanIter iter) {
            super.reset(iter);
            theResult.clear();
        }
    }
    /* The comparison operator implemented by this iterator (eq, neq, gt, ...). */
    private final FuncCode theCode;
    /* Subplan computing the left operand. */
    private final PlanIter theLeftOp;
    /* Subplan computing the right operand. */
    private final PlanIter theRightOp;
    public CompOpIter(
        Expr e,
        int resultReg,
        FuncCode code,
        PlanIter[] argIters) {
        super(e, resultReg);
        theCode = code;
        /* Comparison operators are strictly binary. */
        assert(argIters.length == 2);
        theLeftOp = argIters[0];
        theRightOp = argIters[1];
    }
    /**
     * FastExternalizable constructor. Reads the fields written by
     * writeFastExternal(): the op code (as an enum ordinal) followed by the
     * two operand subplans.
     */
    CompOpIter(DataInput in, short serialVersion) throws IOException {
        super(in, serialVersion);
        short ordinal = readOrdinal(in, FuncCode.VALUES_COUNT);
        theCode = FuncCode.valueOf(ordinal);
        theLeftOp = deserializeIter(in, serialVersion);
        theRightOp = deserializeIter(in, serialVersion);
    }
    /**
     * FastExternalizable writer. Must call superclass method first to
     * write common elements. The field order must stay in sync with the
     * deserializing constructor.
     */
    @Override
    public void writeFastExternal(DataOutput out, short serialVersion)
        throws IOException {
        super.writeFastExternal(out, serialVersion);
        out.writeShort(theCode.ordinal());
        serializeIter(theLeftOp, out, serialVersion);
        serializeIter(theRightOp, out, serialVersion);
    }
    @Override
    public PlanIterKind getKind() {
        return PlanIterKind.VALUE_COMPARE;
    }
    /* Exposes the comparison op code to the compiler/runtime framework. */
    @Override
    FuncCode getFuncCode() {
        return theCode;
    }
    @Override
    public void open(RuntimeControlBlock rcb) {
        /* Allocate per-execution state, then open both operand subplans. */
        rcb.setState(theStatePos, new CompIterState());
        theLeftOp.open(rcb);
        theRightOp.open(rcb);
    }
    /*
     * Produces the single boolean (or NULL) result of the comparison.
     * Returns true exactly once; subsequent calls return false.
     */
    @Override
    public boolean next(RuntimeControlBlock rcb) {
        CompIterState state = (CompIterState)rcb.getState(theStatePos);
        if (state.isDone()) {
            return false;
        }
        /*
         * Pull the first item of each operand; a second successful next()
         * call probes for a second item, which makes the operand an illegal
         * multi-item sequence.
         */
        boolean leftOpNext = theLeftOp.next(rcb);
        if (leftOpNext && theLeftOp.next(rcb)) {
            throw new QueryException(
                "The left operand of comparison operator " +
                FuncCompOp.printOp(theCode) +
                " is a sequence with more than one items. Comparison " +
                "operators cannot operate on sequences of more than one items.",
                theLocation);
        }
        boolean rightOpNext = theRightOp.next(rcb);
        if (rightOpNext && theRightOp.next(rcb)) {
            throw new QueryException(
                "The right operand of comparison operator " +
                FuncCompOp.printOp(theCode) +
                " is a sequence with more than one items. Comparison " +
                "operators cannot operate on sequences of more than one items.",
                theLocation);
        }
        if (!rightOpNext && !leftOpNext) {
            /* both sides are empty */
            state.theResult.comp = 0;
        } else if (!rightOpNext || !leftOpNext) {
            /* only one of the sides is empty */
            if (theCode != FuncCode.OP_NEQ) {
                /* this will be converted to false */
                state.theResult.incompatible = true;
            } else {
                /* this will be converted to true */
                state.theResult.comp = 1;
            }
        } else {
            /* Both sides produced exactly one item: compare them. */
            FieldValueImpl lvalue = rcb.getRegVal(theLeftOp.getResultReg());
            FieldValueImpl rvalue = rcb.getRegVal(theRightOp.getResultReg());
            assert(lvalue != null && rvalue != null);
            compare(rcb,
                    lvalue,
                    rvalue,
                    theCode,
                    false, // forSort
                    state.theResult,
                    getLocation());
        }
        /* A NULL operand makes the whole comparison NULL. */
        if (state.theResult.haveNull) {
            rcb.setRegVal(theResultReg, NullValueImpl.getInstance());
            state.done();
            return true;
        }
        /* Incomparable operands yield false. */
        if (state.theResult.incompatible) {
            rcb.setRegVal(theResultReg, BooleanValueImpl.falseValue);
            state.done();
            return true;
        }
        /* Convert the 3-way comparison result to a boolean per the op code. */
        int comp = state.theResult.comp;
        boolean result;
        switch (theCode) {
        case OP_EQ:
            result = (comp == 0);
            break;
        case OP_NEQ:
            result = (comp != 0);
            break;
        case OP_GT:
            result = (comp > 0);
            break;
        case OP_GE:
            result = (comp >= 0);
            break;
        case OP_LT:
            result = (comp < 0);
            break;
        case OP_LE:
            result = (comp <= 0);
            break;
        default:
            throw new QueryStateException(
                "Invalid operation code: " + theCode);
        }
        FieldValueImpl res = (result ?
                              BooleanValueImpl.trueValue :
                              BooleanValueImpl.falseValue);
        rcb.setRegVal(theResultReg, res);
        state.done();
        return true;
    }
    @Override
    public void reset(RuntimeControlBlock rcb) {
        theLeftOp.reset(rcb);
        theRightOp.reset(rcb);
        PlanIterState state = rcb.getState(theStatePos);
        state.reset(this);
    }
    @Override
    public void close(RuntimeControlBlock rcb) {
        PlanIterState state = rcb.getState(theStatePos);
        /* close() may be called on an iterator that was never opened. */
        if (state == null) {
            return;
        }
        theLeftOp.close(rcb);
        theRightOp.close(rcb);
        state.close();
    }
    /*
     * Implements a total order among all kinds of values.
     *
     * NULLs sort last in ascending order (first in descending). Among
     * non-NULL values, values of different structural kinds are ordered by
     * kind: in ascending order, atomics < records < maps < arrays.
     */
    public static int compareTotalOrder(
        RuntimeControlBlock rcb,
        FieldValueImpl v1,
        FieldValueImpl v2,
        SortSpec sortSpec,
        Location loc) {
        if (v1.isNull()) {
            if (v2.isNull()) {
                return 0;
            }
            return (sortSpec.theIsDesc ? -1 : 1);
        }
        if (v2.isNull()) {
            return (sortSpec.theIsDesc ? 1 : -1);
        }
        Type tc1 = v1.getType();
        Type tc2 = v2.getType();
        switch (tc1) {
        case RECORD:
            switch (tc2) {
            case RECORD:
                return compareRecordsTotalOrder(rcb,
                                                (RecordValueImpl)v1,
                                                (RecordValueImpl)v2,
                                                sortSpec, loc);
            case MAP:
            case ARRAY:
                return (sortSpec.theIsDesc ? 1 : -1);
            default:
                assert(!v2.isComplex());
                return (sortSpec.theIsDesc ? -1 : 1);
            }
        case MAP:
            switch (tc2) {
            case RECORD:
                return (sortSpec.theIsDesc ? -1 : 1);
            case MAP:
                return compareMapsTotalOrder(rcb,
                                             (MapValueImpl)v1,
                                             (MapValueImpl)v2,
                                             sortSpec, loc);
            case ARRAY:
                return (sortSpec.theIsDesc ? 1 : -1);
            default:
                assert(!v2.isComplex());
                return (sortSpec.theIsDesc ? -1 : 1);
            }
        case ARRAY:
            switch (tc2) {
            case RECORD:
            case MAP:
                return (sortSpec.theIsDesc ? -1 : 1);
            case ARRAY:
                return compareArraysTotalOrder(rcb,
                                               (ArrayValueImpl)v1,
                                               (ArrayValueImpl)v2,
                                               sortSpec, loc);
            default:
                assert(!v2.isComplex());
                return (sortSpec.theIsDesc ? -1 : 1);
            }
        default:
            assert(!v1.isComplex());
            switch (tc2) {
            case RECORD:
            case MAP:
            case ARRAY:
                return (sortSpec.theIsDesc ? 1 : -1);
            default:
                assert(!v2.isComplex());
                return compareAtomicsTotalOrder(v1, v2, sortSpec);
            }
        }
    }
static int compareRecordsTotalOrder(
RuntimeControlBlock rcb,
RecordValueImpl v1,
RecordValueImpl v2,
SortSpec sortSpec,
Location loc) {
RecordDefImpl def1 = v1.getDefinition();
RecordDefImpl def2 = v2.getDefinition();
if (!def1.equals(def2)) {
int id1 = System.identityHashCode(def1);
int id2 = System.identityHashCode(def2);
return ((Integer)id1).compareTo(id2);
}
int comp = 0;
int numFields = def1.getNumFields();
for (int i = 0; i < numFields; ++i) {
comp = compareTotalOrder(rcb,
v1.get(i), v2.get(i),
sortSpec, loc);
if (comp != 0) {
return (sortSpec.theIsDesc ? -comp : comp);
}
}
return 0;
}
    /*
     * Total-order comparison of two maps: key/value pairs are compared
     * pairwise in iteration order (key first, then value). If one map is a
     * "prefix" of the other, the smaller map sorts first in ascending order.
     */
    static int compareMapsTotalOrder(
        RuntimeControlBlock rcb,
        MapValueImpl v1,
        MapValueImpl v2,
        SortSpec sortSpec,
        Location loc) {
        Iterator<String> keysIter1 = v1.getFieldNames().iterator();
        Iterator<String> keysIter2 = v2.getFieldNames().iterator();
        int comp = 0;
        while (keysIter1.hasNext() && keysIter2.hasNext()) {
            String k1 = keysIter1.next();
            String k2 = keysIter2.next();
            comp = k1.compareTo(k2);
            if (comp != 0) {
                return (sortSpec.theIsDesc ? -comp : comp);
            }
            comp = compareTotalOrder(rcb,
                                     v1.get(k1), v2.get(k2),
                                     sortSpec, loc);
            if (comp != 0) {
                return (sortSpec.theIsDesc ? -comp : comp);
            }
        }
        if (v1.size() == v2.size()) {
            return 0;
        }
        /* The map that ran out of keys first is the smaller one. */
        if (keysIter2.hasNext()) {
            return (sortSpec.theIsDesc ? 1 : -1);
        }
        return (sortSpec.theIsDesc ? -1 : 1);
    }
    /*
     * Total-order comparison of two arrays: element-wise, with the shorter
     * array sorting first (in ascending order) when it is a prefix of the
     * longer one.
     */
    static int compareArraysTotalOrder(
        RuntimeControlBlock rcb,
        ArrayValueImpl v1,
        ArrayValueImpl v2,
        SortSpec sortSpec,
        Location loc) {
        int minSize = Math.min(v1.size(), v2.size());
        for (int i = 0; i < minSize; ++i) {
            FieldValueImpl elem1 = v1.getElement(i);
            FieldValueImpl elem2 = v2.getElement(i);
            int comp = compareTotalOrder(rcb, elem1, elem2, sortSpec, loc);
            if (comp != 0) {
                return (sortSpec.theIsDesc ? -comp : comp);
            }
        }
        if (v1.size() != minSize) {
            return (sortSpec.theIsDesc ? -1 : 1);
        } else if (v2.size() != minSize) {
            return (sortSpec.theIsDesc ? 1 : -1);
        } else {
            return 0;
        }
    }
    /*
     * Total-order comparison of two atomic values. The raw comparison is
     * direction-adjusted for descending order, and then the placement of
     * "special" values (isSpecialValue()) is overridden to honor the
     * nulls-first/last spec.
     * NOTE(review): the overrides assume the raw comparison places special
     * values last in ascending order -- confirm against
     * FieldValueImpl.compareAtomicsTotalOrder().
     */
    static int compareAtomicsTotalOrder(
        FieldValueImpl v1,
        FieldValueImpl v2,
        SortSpec sortSpec) {
        int comp = FieldValueImpl.compareAtomicsTotalOrder(v1, v2);
        comp = (sortSpec.theIsDesc ? -comp : comp);
        /* Ascending + nulls-first: force specials before non-specials. */
        if (!sortSpec.theIsDesc && sortSpec.theNullsFirst) {
            if (v1.isSpecialValue() && !v2.isSpecialValue()) {
                comp = -1;
            }
            if (!v1.isSpecialValue() && v2.isSpecialValue()) {
                comp = 1;
            }
        /* Descending + nulls-last: force specials after non-specials. */
        } else if (sortSpec.theIsDesc && !sortSpec.theNullsFirst) {
            if (v1.isSpecialValue() && !v2.isSpecialValue()) {
                comp = 1;
            }
            if (!v1.isSpecialValue() && v2.isSpecialValue()) {
                comp = -1;
            }
        }
        return comp;
    }
    /*
     * Compare 2 values for the order-relation specified by the given opCode.
     * If the values are complex, the method will, in general, call itself
     * recursively on the contained values.
     *
     * The method returns 3 pieces of info (inside the "res" out param):
     *
     * a. Whether either v0 or v1 is NULL.
     * b. Whether the values are not comparable.
     * c1. If both a and b are false and the operator is = or !=, an integer which is
     *     equal to 0 if v0 == v1, and non-0 if v0 != v1.
     * c2. If both a and b are false and the operator is >, >=, <, or <=, an integer
     *     which is equal to 0 if v0 == v1, greater than 0 if v0 > v1, and
     *     less than zero if v0 < v1.
     */
    public static void compare(
        RuntimeControlBlock rcb,
        FieldValueImpl v0,
        FieldValueImpl v1,
        FuncCode opCode,
        boolean forSort,
        CompResult res,
        Location location) {
        if (rcb != null && rcb.getTraceLevel() >= 4) {
            rcb.trace("Comparing values: \n" + v0 + "\n" + v1);
        }
        res.clear();
        /* SQL NULL on either side makes the whole result NULL. */
        if (v0.isNull() || v1.isNull()) {
            res.haveNull = true;
            return;
        }
        /*
         * json null compares equal only to itself; against anything else it
         * is "not equal": true for !=, false for every other operator.
         */
        if (v1.isJsonNull()) {
            if (v0.isJsonNull()) {
                res.comp = 0;
                return;
            }
            if (opCode != FuncCode.OP_NEQ) {
                /* this will be converted to false */
                res.incompatible = true;
                return;
            }
            /* this will be converted to true */
            res.comp = 1;
            return;
        }
        Type tc0 = v0.getType();
        Type tc1 = v1.getType();
        switch (tc0) {
        case EMPTY:
            /* EMPTY equals EMPTY only for eq/ge/le; otherwise incomparable. */
            if (tc1 == Type.EMPTY) {
                if (opCode == FuncCode.OP_EQ ||
                    opCode == FuncCode.OP_GE ||
                    opCode == FuncCode.OP_LE) {
                    res.comp = 0;
                } else {
                    res.incompatible = true;
                }
                return;
            }
            if (opCode == FuncCode.OP_NEQ) {
                res.comp = 1;
            } else {
                res.incompatible = true;
            }
            return;
        /*
         * All numeric types are mutually comparable. When sorting (forSort),
         * ties between numerically-equal values of different kinds are
         * broken deterministically: INTEGER/LONG sort before an equal
         * FLOAT/DOUBLE, which sort before an equal NUMBER.
         */
        case INTEGER: {
            switch (tc1) {
            case INTEGER:
                res.comp = IntegerValueImpl.compare(
                    ((IntegerValueImpl)v0).getInt(),
                    ((IntegerValueImpl)v1).getInt());
                return;
            case LONG:
                res.comp = LongValueImpl.compare(
                    ((IntegerValueImpl)v0).getLong(),
                    ((LongValueImpl)v1).getLong());
                return;
            case FLOAT:
                res.comp = Float.compare(
                    ((IntegerValueImpl)v0).getInt(),
                    ((FloatValueImpl)v1).getFloat());
                if (forSort && res.comp == 0) {
                    res.comp = -1;
                }
                return;
            case DOUBLE:
                res.comp = Double.compare(
                    ((IntegerValueImpl)v0).getInt(),
                    ((DoubleValueImpl)v1).getDouble());
                if (forSort && res.comp == 0) {
                    res.comp = -1;
                }
                return;
            case NUMBER:
                /* Delegate to NUMBER's compareTo and flip the sign. */
                res.comp = -v1.compareTo(v0);
                if (forSort && res.comp == 0) {
                    res.comp = -1;
                }
                return;
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case LONG: {
            switch (tc1) {
            case INTEGER:
                res.comp = LongValueImpl.compare(
                    ((LongValueImpl)v0).getLong(),
                    ((IntegerValueImpl)v1).getLong());
                return;
            case LONG:
                res.comp = LongValueImpl.compare(
                    ((LongValueImpl)v0).getLong(),
                    ((LongValueImpl)v1).getLong());
                return;
            case FLOAT:
                res.comp = Float.compare(
                    ((LongValueImpl)v0).getLong(),
                    ((FloatValueImpl)v1).getFloat());
                if (forSort && res.comp == 0) {
                    res.comp = -1;
                }
                return;
            case DOUBLE:
                res.comp = Double.compare(
                    ((LongValueImpl)v0).getLong(),
                    ((DoubleValueImpl)v1).getDouble());
                if (forSort && res.comp == 0) {
                    res.comp = -1;
                }
                return;
            case NUMBER:
                res.comp = -v1.compareTo(v0);
                if (forSort && res.comp == 0) {
                    res.comp = -1;
                }
                return;
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case FLOAT: {
            switch (tc1) {
            case INTEGER:
                res.comp = Float.compare(
                    ((FloatValueImpl)v0).getFloat(),
                    ((IntegerValueImpl)v1).getInt());
                if (forSort && res.comp == 0) {
                    res.comp = 1;
                }
                return;
            case LONG:
                res.comp = Float.compare(
                    ((FloatValueImpl)v0).getFloat(),
                    ((LongValueImpl)v1).getLong());
                if (forSort && res.comp == 0) {
                    res.comp = 1;
                }
                return;
            case FLOAT:
                res.comp = Float.compare(
                    ((FloatValueImpl)v0).getFloat(),
                    ((FloatValueImpl)v1).getFloat());
                return;
            case DOUBLE:
                res.comp = Double.compare(
                    ((FloatValueImpl)v0).getDouble(),
                    ((DoubleValueImpl)v1).getDouble());
                return;
            case NUMBER:
                res.comp = -v1.compareTo(v0);
                if (forSort && res.comp == 0) {
                    res.comp = -1;
                }
                return;
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case DOUBLE: {
            switch (tc1) {
            case INTEGER:
                res.comp = Double.compare(
                    ((DoubleValueImpl)v0).getDouble(),
                    ((IntegerValueImpl)v1).getInt());
                if (forSort && res.comp == 0) {
                    res.comp = 1;
                }
                return;
            case LONG:
                res.comp = Double.compare(
                    ((DoubleValueImpl)v0).getDouble(),
                    ((LongValueImpl)v1).getLong());
                if (forSort && res.comp == 0) {
                    res.comp = 1;
                }
                return;
            case FLOAT:
                res.comp = Double.compare(
                    ((DoubleValueImpl)v0).getDouble(),
                    ((FloatValueImpl)v1).getDouble());
                return;
            case DOUBLE:
                res.comp = Double.compare(
                    ((DoubleValueImpl)v0).getDouble(),
                    ((DoubleValueImpl)v1).getDouble());
                return;
            case NUMBER:
                res.comp = -v1.compareTo(v0);
                if (forSort && res.comp == 0) {
                    res.comp = -1;
                }
                return;
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case NUMBER: {
            /*
             * Number is comparable against any numeric type
             */
            NumberValue number = (NumberValue) v0;
            switch (tc1) {
            case NUMBER:
            case DOUBLE:
            case FLOAT:
            case INTEGER:
            case LONG:
                res.comp = number.compareTo(v1);
                if (forSort && res.comp == 0) {
                    res.comp = 1;
                }
                return;
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case STRING: {
            switch (tc1) {
            case STRING:
                res.comp = ((StringValueImpl)v0).getString().compareTo(
                    ((StringValueImpl)v1).getString());
                return;
            case ENUM:
                // TODO: optimize this
                /* Promote the string to the enum's type; null means the
                 * string is not one of the enum's values. */
                FieldValueImpl enumVal = TypeManager.promote(
                    v0, TypeManager.createValueType(v1));
                if (enumVal == null) {
                    res.incompatible = true;
                    return;
                }
                compareEnums(enumVal, v1, res);
                return;
            case TIMESTAMP:
                /* A string is comparable to a timestamp by parsing it with
                 * the timestamp's definition. */
                v0 = ((TimestampDefImpl)(v1.getDefinition())).
                    fromString(((StringValueImpl)v0).get());
                res.comp = ((TimestampValueImpl)v0).compareTo(v1);
                if (rcb != null && rcb.getTraceLevel() >= 3) {
                    rcb.trace("Comparing STRING with TIMESTAMP: \n" + v0 + "\n" + v1);
                    rcb.trace("res.comp = " + res.comp);
                }
                return;
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case ENUM: {
            switch (tc1) {
            case STRING:
                FieldValueImpl enumVal = TypeManager.promote(
                    v1, TypeManager.createValueType(v0));
                if (enumVal == null) {
                    res.incompatible = true;
                    return;
                }
                compareEnums(v0, enumVal, res);
                return;
            case ENUM:
                compareEnums(v0, v1, res);
                return;
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case BOOLEAN: {
            switch (tc1) {
            case BOOLEAN:
                res.comp = ((BooleanValueImpl)v0).compareTo(v1);
                return;
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case BINARY:
        case FIXED_BINARY: {
            switch (tc1) {
            case BINARY:
            case FIXED_BINARY:
                /* Binaries support only =/!=; there is no ordering. */
                if (opCode != FuncCode.OP_EQ && opCode != FuncCode.OP_NEQ) {
                    res.incompatible = true;
                    return;
                }
                res.comp = (Arrays.equals(v0.getBytes(), v1.getBytes()) ? 0 : 1);
                return;
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case TIMESTAMP: {
            switch (tc1) {
            case TIMESTAMP:
                res.comp = ((TimestampValueImpl)v0).compareTo(v1);
                return;
            case STRING:
                /* Parse the string side using this timestamp's definition. */
                v1 = ((TimestampDefImpl)(v0.getDefinition())).
                    fromString(((StringValueImpl)v1).get());
                res.comp = ((TimestampValueImpl)v0).compareTo(v1);
                if (rcb != null && rcb.getTraceLevel() >= 3) {
                    rcb.trace("Comparing TIMESTAMP with STRING: \n" + v0 + "\n" + v1);
                    rcb.trace("res.comp = " + res.comp);
                }
                return;
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case RECORD: {
            switch (tc1) {
            case RECORD:
                /* Records support only =/!=; there is no ordering here. */
                if (opCode != FuncCode.OP_EQ && opCode != FuncCode.OP_NEQ) {
                    res.incompatible = true;
                    return;
                }
                if (v0.isTuple()) {
                    v0 = ((TupleValue)v0).toRecord();
                }
                if (v1.isTuple()) {
                    v1 = ((TupleValue)v1).toRecord();
                }
                RecordValueImpl r0 = (RecordValueImpl)v0;
                RecordValueImpl r1 = (RecordValueImpl)v1;
                compareRecords(rcb, r0, r1, opCode, res, location);
                return;
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case MAP: {
            switch (tc1) {
            case MAP:
                /* Maps support only =/!=; there is no ordering here. */
                if (opCode != FuncCode.OP_EQ && opCode != FuncCode.OP_NEQ) {
                    res.incompatible = true;
                    return;
                }
                MapValueImpl m0 = (MapValueImpl)v0;
                MapValueImpl m1 = (MapValueImpl)v1;
                compareMaps(rcb, m0, m1, opCode, res, location);
                return;
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case ARRAY: {
            switch (tc1) {
            case ARRAY:
                ArrayValueImpl a0 = (ArrayValueImpl)v0;
                ArrayValueImpl a1 = (ArrayValueImpl)v1;
                /* For =/!=, arrays of different size can never be equal. */
                if (opCode == FuncCode.OP_EQ || opCode == FuncCode.OP_NEQ) {
                    if (a0.size() != a1.size()) {
                        res.comp = 1;
                        return;
                    }
                }
                /* Element-wise comparison; a prefix array is "less than". */
                int minSize = Math.min(a0.size(), a1.size());
                for (int i = 0; i < minSize; ++i) {
                    FieldValueImpl elem0 = a0.getElement(i);
                    FieldValueImpl elem1 = a1.getElement(i);
                    assert(elem0 != null);
                    assert(elem1 != null);
                    compare(rcb, elem0, elem1, opCode, forSort, res, location);
                    if (res.comp != 0 || res.haveNull || res.incompatible) {
                        return;
                    }
                }
                if (a0.size() != minSize) {
                    res.comp = 1;
                    return;
                } else if (a1.size() != minSize) {
                    res.comp = -1;
                    return;
                } else {
                    res.comp = 0;
                    return;
                }
            case EMPTY:
                if (opCode == FuncCode.OP_NEQ) {
                    res.comp = 1;
                } else {
                    res.incompatible = true;
                }
                return;
            default:
                res.incompatible = true;
                return;
            }
        }
        case ANY_JSON_ATOMIC: {
            /* The only concrete ANY_JSON_ATOMIC value is json null. */
            assert(v0.isJsonNull());
            if (v1.isJsonNull()) {
                res.comp = 0;
                return;
            }
            if (opCode != FuncCode.OP_NEQ) {
                /* this will be converted to false */
                res.incompatible = true;
                return;
            }
            /* this will be converted to true */
            res.comp = 1;
            return;
        }
        default:
            throw new QueryStateException(
                "Unexpected operand type in comparison operator: " + tc0);
        }
    }
    /*
     * Deep-equality comparison of two maps (the caller invokes this only for
     * the = and != operators). Sets res.comp to 0 if the maps are equal and
     * to a non-zero value otherwise; there is no ordering among maps.
     */
    static void compareMaps(
        RuntimeControlBlock rcb,
        MapValueImpl v0,
        MapValueImpl v1,
        FuncCode opCode,
        CompResult res,
        Location location) {
        /* Maps of different cardinality can never be equal. */
        if (v0.size() != v1.size()) {
            res.comp = 1;
            return;
        }
        for (Map.Entry<String, FieldValue> e0 : v0.getMap().entrySet()) {
            String k0 = e0.getKey();
            FieldValueImpl fv0 = (FieldValueImpl)e0.getValue();
            FieldValueImpl fv1 = v1.get(k0);
            /* A key present in v0 but missing from v1 means "not equal". */
            if (fv1 == null) {
                res.comp = 1;
                return;
            }
            compare(rcb, fv0, fv1, opCode, false, res, location);
            if (res.comp != 0 || res.haveNull || res.incompatible) {
                return;
            }
        }
        res.comp = 0;
        return;
    }
    /*
     * Deep-equality comparison of two records (the caller invokes this only
     * for the = and != operators). Records are equal if they have the same
     * number of fields, the field names match pairwise (case-insensitively),
     * and the corresponding field values are equal.
     */
    static void compareRecords(
        RuntimeControlBlock rcb,
        RecordValueImpl v0,
        RecordValueImpl v1,
        FuncCode opCode,
        CompResult res,
        Location location) {
        if (v0.getNumFields() != v1.getNumFields()) {
            res.comp = 1;
            return;
        }
        for (int i = 0; i < v0.getNumFields(); ++i) {
            FieldValueImpl fv0 = v0.get(i);
            FieldValueImpl fv1 = v1.get(i);
            compare(rcb, fv0, fv1, opCode, false, res, location);
            if (res.comp != 0 || res.haveNull || res.incompatible) {
                return;
            }
            /* Field names are compared case-insensitively. */
            String k0 = v0.getFieldName(i);
            String k1 = v1.getFieldName(i);
            if (!k0.equalsIgnoreCase(k1)) {
                res.comp = 1;
                return;
            }
        }
        res.comp = 0;
        return;
    }
static void compareEnums(
FieldValueImpl v0,
FieldValueImpl v1,
CompResult res) {
EnumValueImpl e0 = (EnumValueImpl)v0;
EnumValueImpl e1 = (EnumValueImpl)v1;
EnumDefImpl def0 = e0.getDefinition();
EnumDefImpl def1 = e1.getDefinition();
if (def0.valuesEqual(def1)) {
int idx0 = e0.getIndex();
int idx1 = e1.getIndex();
res.comp = ((Integer)idx0).compareTo(idx1);
return;
}
res.incompatible = true;
}
    /* Appends the JSON-style display of the two operand subplans. */
    @Override
    protected void displayContent(
        StringBuilder sb,
        DisplayFormatter formatter,
        boolean verbose) {
        formatter.indent(sb);
        sb.append("\"left operand\" :\n");
        theLeftOp.display(sb, formatter, verbose);
        sb.append(",\n");
        formatter.indent(sb);
        sb.append("\"right operand\" :\n");
        theRightOp.display(sb, formatter, verbose);
    }
@Override
void displayName(StringBuilder sb) {
switch (theCode) {
case OP_GT:
sb.append("GREATER_THAN");
break;
case OP_GE:
sb.append("GREATER_OR_EQUAL");
break;
case OP_LT:
sb.append("LESS_THAN");
break;
case OP_LE:
sb.append("LESS_OR_EQUAL");
break;
case OP_EQ:
sb.append("EQUAL");
break;
case OP_NEQ:
sb.append("NOT_EQUAL");
break;
default:
break;
}
}
    @Override
    public boolean equals(Object obj) {
        if (this == obj) {
            return true;
        }
        if (!super.equals(obj) || !(obj instanceof CompOpIter)) {
            return false;
        }
        final CompOpIter other = (CompOpIter) obj;
        /* Equal iff they apply the same operator to equal operand subplans. */
        return (theCode == other.theCode) &&
            Objects.equals(theLeftOp, other.theLeftOp) &&
            Objects.equals(theRightOp, other.theRightOp);
    }
    @Override
    public int hashCode() {
        /* Must stay consistent with equals(): same fields, same order. */
        return Objects.hash(super.hashCode(), theCode, theLeftOp, theRightOp);
    }
}
|
googleapis/google-cloud-java | 36,937 | java-netapp/proto-google-cloud-netapp-v1/src/main/java/com/google/cloud/netapp/v1/CreateQuotaRuleRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/netapp/v1/quota_rule.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.netapp.v1;
/**
*
*
* <pre>
* CreateQuotaRuleRequest for creating a quota rule.
* </pre>
*
* Protobuf type {@code google.cloud.netapp.v1.CreateQuotaRuleRequest}
*/
public final class CreateQuotaRuleRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.netapp.v1.CreateQuotaRuleRequest)
CreateQuotaRuleRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use CreateQuotaRuleRequest.newBuilder() to construct.
  private CreateQuotaRuleRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance: string fields start out as empty strings.
  private CreateQuotaRuleRequest() {
    parent_ = "";
    quotaRuleId_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new CreateQuotaRuleRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.netapp.v1.QuotaRuleProto
        .internal_static_google_cloud_netapp_v1_CreateQuotaRuleRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.netapp.v1.QuotaRuleProto
        .internal_static_google_cloud_netapp_v1_CreateQuotaRuleRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.netapp.v1.CreateQuotaRuleRequest.class,
            com.google.cloud.netapp.v1.CreateQuotaRuleRequest.Builder.class);
  }
  // Presence bits; bit 0 tracks whether quota_rule is set.
  private int bitField0_;
  public static final int PARENT_FIELD_NUMBER = 1;
  // Holds either a String or a lazily-decoded ByteString.
  @SuppressWarnings("serial")
  private volatile java.lang.Object parent_ = "";
  /**
   *
   *
   * <pre>
   * Required. Parent value for CreateQuotaRuleRequest
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The parent.
   */
  @java.lang.Override
  public java.lang.String getParent() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the ByteString once and cache the resulting String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      parent_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Parent value for CreateQuotaRuleRequest
   * </pre>
   *
   * <code>
   * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
   * </code>
   *
   * @return The bytes for parent.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getParentBytes() {
    java.lang.Object ref = parent_;
    if (ref instanceof java.lang.String) {
      // Encode the String once and cache the resulting ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      parent_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int QUOTA_RULE_FIELD_NUMBER = 2;
  private com.google.cloud.netapp.v1.QuotaRule quotaRule_;
  /**
   *
   *
   * <pre>
   * Required. Fields of the to be created quota rule.
   * </pre>
   *
   * <code>
   * .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the quotaRule field is set.
   */
  @java.lang.Override
  public boolean hasQuotaRule() {
    // Presence is tracked via bit 0 of bitField0_.
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Fields of the to be created quota rule.
   * </pre>
   *
   * <code>
   * .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The quotaRule.
   */
  @java.lang.Override
  public com.google.cloud.netapp.v1.QuotaRule getQuotaRule() {
    // Never returns null: falls back to the default instance when unset.
    return quotaRule_ == null
        ? com.google.cloud.netapp.v1.QuotaRule.getDefaultInstance()
        : quotaRule_;
  }
  /**
   *
   *
   * <pre>
   * Required. Fields of the to be created quota rule.
   * </pre>
   *
   * <code>
   * .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.netapp.v1.QuotaRuleOrBuilder getQuotaRuleOrBuilder() {
    return quotaRule_ == null
        ? com.google.cloud.netapp.v1.QuotaRule.getDefaultInstance()
        : quotaRule_;
  }
  public static final int QUOTA_RULE_ID_FIELD_NUMBER = 3;
  // Holds either a String or a lazily-decoded ByteString.
  @SuppressWarnings("serial")
  private volatile java.lang.Object quotaRuleId_ = "";
  /**
   *
   *
   * <pre>
   * Required. ID of the quota rule to create. Must be unique within the parent
   * resource. Must contain only letters, numbers, underscore and hyphen, with
   * the first character a letter or underscore, the last a letter or underscore
   * or a number, and a 63 character maximum.
   * </pre>
   *
   * <code>string quota_rule_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The quotaRuleId.
   */
  @java.lang.Override
  public java.lang.String getQuotaRuleId() {
    java.lang.Object ref = quotaRuleId_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Decode the ByteString once and cache the resulting String.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      quotaRuleId_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. ID of the quota rule to create. Must be unique within the parent
   * resource. Must contain only letters, numbers, underscore and hyphen, with
   * the first character a letter or underscore, the last a letter or underscore
   * or a number, and a 63 character maximum.
   * </pre>
   *
   * <code>string quota_rule_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
   *
   * @return The bytes for quotaRuleId.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getQuotaRuleIdBytes() {
    java.lang.Object ref = quotaRuleId_;
    if (ref instanceof java.lang.String) {
      // Encode the String once and cache the resulting ByteString.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      quotaRuleId_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization check: -1 unknown, 0 false, 1 true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes only the fields that are set / non-default.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(2, getQuotaRule());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(quotaRuleId_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 3, quotaRuleId_);
    }
    getUnknownFields().writeTo(output);
  }
  // Computes and memoizes the serialized size; must mirror writeTo().
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
    }
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getQuotaRule());
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(quotaRuleId_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, quotaRuleId_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field structural equality over parent, quota_rule (presence first,
  // then value), quota_rule_id, and unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.netapp.v1.CreateQuotaRuleRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.netapp.v1.CreateQuotaRuleRequest other =
        (com.google.cloud.netapp.v1.CreateQuotaRuleRequest) obj;
    if (!getParent().equals(other.getParent())) return false;
    if (hasQuotaRule() != other.hasQuotaRule()) return false;
    if (hasQuotaRule()) {
      if (!getQuotaRule().equals(other.getQuotaRule())) return false;
    }
    if (!getQuotaRuleId().equals(other.getQuotaRuleId())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over the same fields as equals (memoized in memoizedHashCode); the
  // quota_rule term is only mixed in when the field is present.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + PARENT_FIELD_NUMBER;
    hash = (53 * hash) + getParent().hashCode();
    if (hasQuotaRule()) {
      hash = (37 * hash) + QUOTA_RULE_FIELD_NUMBER;
      hash = (53 * hash) + getQuotaRule().hashCode();
    }
    hash = (37 * hash) + QUOTA_RULE_ID_FIELD_NUMBER;
    hash = (53 * hash) + getQuotaRuleId().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads: ByteBuffer / ByteString / byte[]
  // delegate to PARSER directly; stream variants go through the
  // GeneratedMessageV3 helpers so IOExceptions are surfaced unchanged.
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: newBuilder(prototype) seeds a builder from an existing
  // message; toBuilder() avoids copying when called on the default instance.
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.cloud.netapp.v1.CreateQuotaRuleRequest prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * CreateQuotaRuleRequest for creating a quota rule.
   * </pre>
   *
   * Protobuf type {@code google.cloud.netapp.v1.CreateQuotaRuleRequest}
   */
  // Generated mutable builder. Field presence is tracked in bitField0_:
  // bit 0x1 = parent, bit 0x2 = quota_rule, bit 0x4 = quota_rule_id.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.netapp.v1.CreateQuotaRuleRequest)
      com.google.cloud.netapp.v1.CreateQuotaRuleRequestOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.netapp.v1.QuotaRuleProto
          .internal_static_google_cloud_netapp_v1_CreateQuotaRuleRequest_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.netapp.v1.QuotaRuleProto
          .internal_static_google_cloud_netapp_v1_CreateQuotaRuleRequest_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.netapp.v1.CreateQuotaRuleRequest.class,
              com.google.cloud.netapp.v1.CreateQuotaRuleRequest.Builder.class);
    }
    // Construct using com.google.cloud.netapp.v1.CreateQuotaRuleRequest.newBuilder()
    private Builder() {
      maybeForceBuilderInitialization();
    }
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
      maybeForceBuilderInitialization();
    }
    // Eagerly creates nested-field builders when the runtime requires it.
    private void maybeForceBuilderInitialization() {
      if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
        getQuotaRuleFieldBuilder();
      }
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      parent_ = "";
      quotaRule_ = null;
      if (quotaRuleBuilder_ != null) {
        quotaRuleBuilder_.dispose();
        quotaRuleBuilder_ = null;
      }
      quotaRuleId_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.netapp.v1.QuotaRuleProto
          .internal_static_google_cloud_netapp_v1_CreateQuotaRuleRequest_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.netapp.v1.CreateQuotaRuleRequest getDefaultInstanceForType() {
      return com.google.cloud.netapp.v1.CreateQuotaRuleRequest.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.netapp.v1.CreateQuotaRuleRequest build() {
      com.google.cloud.netapp.v1.CreateQuotaRuleRequest result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.netapp.v1.CreateQuotaRuleRequest buildPartial() {
      com.google.cloud.netapp.v1.CreateQuotaRuleRequest result =
          new com.google.cloud.netapp.v1.CreateQuotaRuleRequest(this);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Copies set fields from the builder into the immutable message, translating
    // builder presence bits into the message's own bitField0_ layout.
    private void buildPartial0(com.google.cloud.netapp.v1.CreateQuotaRuleRequest result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.parent_ = parent_;
      }
      int to_bitField0_ = 0;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.quotaRule_ = quotaRuleBuilder_ == null ? quotaRule_ : quotaRuleBuilder_.build();
        to_bitField0_ |= 0x00000001;
      }
      if (((from_bitField0_ & 0x00000004) != 0)) {
        result.quotaRuleId_ = quotaRuleId_;
      }
      result.bitField0_ |= to_bitField0_;
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.netapp.v1.CreateQuotaRuleRequest) {
        return mergeFrom((com.google.cloud.netapp.v1.CreateQuotaRuleRequest) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    // Typed merge: only non-default fields of `other` overwrite this builder.
    public Builder mergeFrom(com.google.cloud.netapp.v1.CreateQuotaRuleRequest other) {
      if (other == com.google.cloud.netapp.v1.CreateQuotaRuleRequest.getDefaultInstance())
        return this;
      if (!other.getParent().isEmpty()) {
        parent_ = other.parent_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (other.hasQuotaRule()) {
        mergeQuotaRule(other.getQuotaRule());
      }
      if (!other.getQuotaRuleId().isEmpty()) {
        quotaRuleId_ = other.quotaRuleId_;
        bitField0_ |= 0x00000004;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    // Wire-format merge: dispatches on field tags (10 = parent, 18 = quota_rule,
    // 26 = quota_rule_id); unrecognized tags go to the unknown-field set.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                parent_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                input.readMessage(getQuotaRuleFieldBuilder().getBuilder(), extensionRegistry);
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            case 26:
              {
                quotaRuleId_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000004;
                break;
              } // case 26
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    private int bitField0_;
    private java.lang.Object parent_ = "";
    /**
     *
     *
     * <pre>
     * Required. Parent value for CreateQuotaRuleRequest
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The parent.
     */
    public java.lang.String getParent() {
      java.lang.Object ref = parent_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        parent_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Parent value for CreateQuotaRuleRequest
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return The bytes for parent.
     */
    public com.google.protobuf.ByteString getParentBytes() {
      java.lang.Object ref = parent_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        parent_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Parent value for CreateQuotaRuleRequest
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The parent to set.
     * @return This builder for chaining.
     */
    public Builder setParent(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Parent value for CreateQuotaRuleRequest
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @return This builder for chaining.
     */
    public Builder clearParent() {
      parent_ = getDefaultInstance().getParent();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Parent value for CreateQuotaRuleRequest
     * </pre>
     *
     * <code>
     * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
     * </code>
     *
     * @param value The bytes for parent to set.
     * @return This builder for chaining.
     */
    public Builder setParentBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      parent_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }
    private com.google.cloud.netapp.v1.QuotaRule quotaRule_;
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.netapp.v1.QuotaRule,
            com.google.cloud.netapp.v1.QuotaRule.Builder,
            com.google.cloud.netapp.v1.QuotaRuleOrBuilder>
        quotaRuleBuilder_;
    /**
     *
     *
     * <pre>
     * Required. Fields of the to be created quota rule.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return Whether the quotaRule field is set.
     */
    public boolean hasQuotaRule() {
      return ((bitField0_ & 0x00000002) != 0);
    }
    /**
     *
     *
     * <pre>
     * Required. Fields of the to be created quota rule.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     *
     * @return The quotaRule.
     */
    public com.google.cloud.netapp.v1.QuotaRule getQuotaRule() {
      if (quotaRuleBuilder_ == null) {
        return quotaRule_ == null
            ? com.google.cloud.netapp.v1.QuotaRule.getDefaultInstance()
            : quotaRule_;
      } else {
        return quotaRuleBuilder_.getMessage();
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Fields of the to be created quota rule.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setQuotaRule(com.google.cloud.netapp.v1.QuotaRule value) {
      if (quotaRuleBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        quotaRule_ = value;
      } else {
        quotaRuleBuilder_.setMessage(value);
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Fields of the to be created quota rule.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder setQuotaRule(com.google.cloud.netapp.v1.QuotaRule.Builder builderForValue) {
      if (quotaRuleBuilder_ == null) {
        quotaRule_ = builderForValue.build();
      } else {
        quotaRuleBuilder_.setMessage(builderForValue.build());
      }
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Fields of the to be created quota rule.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder mergeQuotaRule(com.google.cloud.netapp.v1.QuotaRule value) {
      if (quotaRuleBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)
            && quotaRule_ != null
            && quotaRule_ != com.google.cloud.netapp.v1.QuotaRule.getDefaultInstance()) {
          getQuotaRuleBuilder().mergeFrom(value);
        } else {
          quotaRule_ = value;
        }
      } else {
        quotaRuleBuilder_.mergeFrom(value);
      }
      if (quotaRule_ != null) {
        bitField0_ |= 0x00000002;
        onChanged();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Fields of the to be created quota rule.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public Builder clearQuotaRule() {
      bitField0_ = (bitField0_ & ~0x00000002);
      quotaRule_ = null;
      if (quotaRuleBuilder_ != null) {
        quotaRuleBuilder_.dispose();
        quotaRuleBuilder_ = null;
      }
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. Fields of the to be created quota rule.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.netapp.v1.QuotaRule.Builder getQuotaRuleBuilder() {
      bitField0_ |= 0x00000002;
      onChanged();
      return getQuotaRuleFieldBuilder().getBuilder();
    }
    /**
     *
     *
     * <pre>
     * Required. Fields of the to be created quota rule.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    public com.google.cloud.netapp.v1.QuotaRuleOrBuilder getQuotaRuleOrBuilder() {
      if (quotaRuleBuilder_ != null) {
        return quotaRuleBuilder_.getMessageOrBuilder();
      } else {
        return quotaRule_ == null
            ? com.google.cloud.netapp.v1.QuotaRule.getDefaultInstance()
            : quotaRule_;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. Fields of the to be created quota rule.
     * </pre>
     *
     * <code>
     * .google.cloud.netapp.v1.QuotaRule quota_rule = 2 [(.google.api.field_behavior) = REQUIRED];
     * </code>
     */
    private com.google.protobuf.SingleFieldBuilderV3<
            com.google.cloud.netapp.v1.QuotaRule,
            com.google.cloud.netapp.v1.QuotaRule.Builder,
            com.google.cloud.netapp.v1.QuotaRuleOrBuilder>
        getQuotaRuleFieldBuilder() {
      if (quotaRuleBuilder_ == null) {
        quotaRuleBuilder_ =
            new com.google.protobuf.SingleFieldBuilderV3<
                com.google.cloud.netapp.v1.QuotaRule,
                com.google.cloud.netapp.v1.QuotaRule.Builder,
                com.google.cloud.netapp.v1.QuotaRuleOrBuilder>(
                getQuotaRule(), getParentForChildren(), isClean());
        quotaRule_ = null;
      }
      return quotaRuleBuilder_;
    }
    private java.lang.Object quotaRuleId_ = "";
    /**
     *
     *
     * <pre>
     * Required. ID of the quota rule to create. Must be unique within the parent
     * resource. Must contain only letters, numbers, underscore and hyphen, with
     * the first character a letter or underscore, the last a letter or underscore
     * or a number, and a 63 character maximum.
     * </pre>
     *
     * <code>string quota_rule_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The quotaRuleId.
     */
    public java.lang.String getQuotaRuleId() {
      java.lang.Object ref = quotaRuleId_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        quotaRuleId_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. ID of the quota rule to create. Must be unique within the parent
     * resource. Must contain only letters, numbers, underscore and hyphen, with
     * the first character a letter or underscore, the last a letter or underscore
     * or a number, and a 63 character maximum.
     * </pre>
     *
     * <code>string quota_rule_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return The bytes for quotaRuleId.
     */
    public com.google.protobuf.ByteString getQuotaRuleIdBytes() {
      java.lang.Object ref = quotaRuleId_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        quotaRuleId_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * Required. ID of the quota rule to create. Must be unique within the parent
     * resource. Must contain only letters, numbers, underscore and hyphen, with
     * the first character a letter or underscore, the last a letter or underscore
     * or a number, and a 63 character maximum.
     * </pre>
     *
     * <code>string quota_rule_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The quotaRuleId to set.
     * @return This builder for chaining.
     */
    public Builder setQuotaRuleId(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      quotaRuleId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. ID of the quota rule to create. Must be unique within the parent
     * resource. Must contain only letters, numbers, underscore and hyphen, with
     * the first character a letter or underscore, the last a letter or underscore
     * or a number, and a 63 character maximum.
     * </pre>
     *
     * <code>string quota_rule_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearQuotaRuleId() {
      quotaRuleId_ = getDefaultInstance().getQuotaRuleId();
      bitField0_ = (bitField0_ & ~0x00000004);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * Required. ID of the quota rule to create. Must be unique within the parent
     * resource. Must contain only letters, numbers, underscore and hyphen, with
     * the first character a letter or underscore, the last a letter or underscore
     * or a number, and a 63 character maximum.
     * </pre>
     *
     * <code>string quota_rule_id = 3 [(.google.api.field_behavior) = REQUIRED];</code>
     *
     * @param value The bytes for quotaRuleId to set.
     * @return This builder for chaining.
     */
    public Builder setQuotaRuleIdBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      quotaRuleId_ = value;
      bitField0_ |= 0x00000004;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.netapp.v1.CreateQuotaRuleRequest)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.netapp.v1.CreateQuotaRuleRequest)
  // Singleton default instance shared by getDefaultInstance()/parser paths.
  private static final com.google.cloud.netapp.v1.CreateQuotaRuleRequest DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.netapp.v1.CreateQuotaRuleRequest();
  }
  public static com.google.cloud.netapp.v1.CreateQuotaRuleRequest getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser used by all parseFrom overloads; attaches the partially-built message
  // to any parse failure so callers can inspect what was read so far.
  private static final com.google.protobuf.Parser<CreateQuotaRuleRequest> PARSER =
      new com.google.protobuf.AbstractParser<CreateQuotaRuleRequest>() {
        @java.lang.Override
        public CreateQuotaRuleRequest parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<CreateQuotaRuleRequest> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<CreateQuotaRuleRequest> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.netapp.v1.CreateQuotaRuleRequest getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkservices/v1/grpc_route.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkservices.v1;
/**
*
*
* <pre>
* Request used by the UpdateGrpcRoute method.
* </pre>
*
* Protobuf type {@code google.cloud.networkservices.v1.UpdateGrpcRouteRequest}
*/
public final class UpdateGrpcRouteRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.networkservices.v1.UpdateGrpcRouteRequest)
UpdateGrpcRouteRequestOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use UpdateGrpcRouteRequest.newBuilder() to construct.
  private UpdateGrpcRouteRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  private UpdateGrpcRouteRequest() {}
  // Reflective instantiation hook used by the protobuf runtime.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new UpdateGrpcRouteRequest();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.networkservices.v1.GrpcRouteProto
        .internal_static_google_cloud_networkservices_v1_UpdateGrpcRouteRequest_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.networkservices.v1.GrpcRouteProto
        .internal_static_google_cloud_networkservices_v1_UpdateGrpcRouteRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest.class,
            com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest.Builder.class);
  }
  // Presence bits: 0x1 = update_mask, 0x2 = grpc_route.
  private int bitField0_;
  public static final int UPDATE_MASK_FIELD_NUMBER = 1;
  private com.google.protobuf.FieldMask updateMask_;
  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * GrpcRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  @java.lang.Override
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * GrpcRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The updateMask.
   */
  @java.lang.Override
  public com.google.protobuf.FieldMask getUpdateMask() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * GrpcRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  @java.lang.Override
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
  }
  public static final int GRPC_ROUTE_FIELD_NUMBER = 2;
  // Null until set; accessors substitute the default instance for null.
  private com.google.cloud.networkservices.v1.GrpcRoute grpcRoute_;
  /**
   *
   *
   * <pre>
   * Required. Updated GrpcRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.GrpcRoute grpc_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the grpcRoute field is set.
   */
  @java.lang.Override
  public boolean hasGrpcRoute() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Updated GrpcRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.GrpcRoute grpc_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The grpcRoute.
   */
  @java.lang.Override
  public com.google.cloud.networkservices.v1.GrpcRoute getGrpcRoute() {
    return grpcRoute_ == null
        ? com.google.cloud.networkservices.v1.GrpcRoute.getDefaultInstance()
        : grpcRoute_;
  }
  /**
   *
   *
   * <pre>
   * Required. Updated GrpcRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.GrpcRoute grpc_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  @java.lang.Override
  public com.google.cloud.networkservices.v1.GrpcRouteOrBuilder getGrpcRouteOrBuilder() {
    return grpcRoute_ == null
        ? com.google.cloud.networkservices.v1.GrpcRoute.getDefaultInstance()
        : grpcRoute_;
  }
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Serializes set fields in field-number order: update_mask (1), grpc_route (2).
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (((bitField0_ & 0x00000001) != 0)) {
      output.writeMessage(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      output.writeMessage(2, getGrpcRoute());
    }
    getUnknownFields().writeTo(output);
  }
  // Computes (and memoizes) the serialized size; must mirror writeTo above.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (((bitField0_ & 0x00000001) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
    }
    if (((bitField0_ & 0x00000002) != 0)) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getGrpcRoute());
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Structural equality: presence then value for both message fields, plus
  // unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest)) {
      return super.equals(obj);
    }
    com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest other =
        (com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest) obj;
    if (hasUpdateMask() != other.hasUpdateMask()) return false;
    if (hasUpdateMask()) {
      if (!getUpdateMask().equals(other.getUpdateMask())) return false;
    }
    if (hasGrpcRoute() != other.hasGrpcRoute()) return false;
    if (hasGrpcRoute()) {
      if (!getGrpcRoute().equals(other.getGrpcRoute())) return false;
    }
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash over the same fields as equals; result memoized in memoizedHashCode.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (hasUpdateMask()) {
      hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
      hash = (53 * hash) + getUpdateMask().hashCode();
    }
    if (hasGrpcRoute()) {
      hash = (37 * hash) + GRPC_ROUTE_FIELD_NUMBER;
      hash = (53 * hash) + getGrpcRoute().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads: buffer/byte variants delegate to
  // PARSER; stream variants use GeneratedMessageV3 helpers for IOException
  // handling.
  public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// ---------------------------------------------------------------------------
// Builder factory methods. toBuilder() on the default instance creates a
// fresh Builder; on any other instance it pre-populates the Builder via
// mergeFrom(this).
// ---------------------------------------------------------------------------
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Request used by the UpdateGrpcRoute method.
 * </pre>
 *
 * Protobuf type {@code google.cloud.networkservices.v1.UpdateGrpcRouteRequest}
 *
 * <p>Mutable builder for {@code UpdateGrpcRouteRequest}. Field presence is
 * tracked in {@code bitField0_} (bit 0 = update_mask, bit 1 = grpc_route);
 * each message field is stored either directly ({@code updateMask_},
 * {@code grpcRoute_}) or via a lazily created {@code SingleFieldBuilderV3}
 * once sub-builder access is requested.
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.networkservices.v1.UpdateGrpcRouteRequest)
    com.google.cloud.networkservices.v1.UpdateGrpcRouteRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.networkservices.v1.GrpcRouteProto
        .internal_static_google_cloud_networkservices_v1_UpdateGrpcRouteRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.networkservices.v1.GrpcRouteProto
        .internal_static_google_cloud_networkservices_v1_UpdateGrpcRouteRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest.class,
            com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest.Builder.class);
  }

  // Construct using com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // Eagerly creates the nested field builders when the runtime flag
  // alwaysUseFieldBuilders is on (used by nested-builder implementations).
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getUpdateMaskFieldBuilder();
      getGrpcRouteFieldBuilder();
    }
  }

  // Resets all fields to their unset state and disposes nested builders.
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    updateMask_ = null;
    if (updateMaskBuilder_ != null) {
      updateMaskBuilder_.dispose();
      updateMaskBuilder_ = null;
    }
    grpcRoute_ = null;
    if (grpcRouteBuilder_ != null) {
      grpcRouteBuilder_.dispose();
      grpcRouteBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.networkservices.v1.GrpcRouteProto
        .internal_static_google_cloud_networkservices_v1_UpdateGrpcRouteRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest getDefaultInstanceForType() {
    return com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest.getDefaultInstance();
  }

  // Builds and validates; proto3 messages are always initialized, so the
  // uninitialized branch is effectively unreachable here.
  @java.lang.Override
  public com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest build() {
    com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest buildPartial() {
    com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest result =
        new com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies each set field (and its presence bit) from the builder into the
  // freshly constructed message.
  private void buildPartial0(com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest result) {
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.grpcRoute_ = grpcRouteBuilder_ == null ? grpcRoute_ : grpcRouteBuilder_.build();
      to_bitField0_ |= 0x00000002;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  // Reflective field mutators simply delegate to GeneratedMessageV3.Builder.
  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dynamic merge: dispatches to the typed overload when possible.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest) {
      return mergeFrom((com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  // Typed merge: only fields present on `other` are merged in.
  public Builder mergeFrom(com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest other) {
    if (other == com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest.getDefaultInstance())
      return this;
    if (other.hasUpdateMask()) {
      mergeUpdateMask(other.getUpdateMask());
    }
    if (other.hasGrpcRoute()) {
      mergeGrpcRoute(other.getGrpcRoute());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  // Wire-format merge: tag 10 = update_mask (field 1), tag 18 = grpc_route
  // (field 2); anything else is preserved as an unknown field.
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              input.readMessage(getGrpcRouteFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  private com.google.protobuf.FieldMask updateMask_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.FieldMask,
          com.google.protobuf.FieldMask.Builder,
          com.google.protobuf.FieldMaskOrBuilder>
      updateMaskBuilder_;

  /**
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in
   * the GrpcRoute resource by the update; paths are relative to the resource.
   * If the user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000001) != 0);
  }

  /**
   * Returns the update_mask, preferring the live sub-builder's message when
   * one exists; never returns null (default instance when unset).
   *
   * @return The updateMask.
   */
  public com.google.protobuf.FieldMask getUpdateMask() {
    if (updateMaskBuilder_ == null) {
      return updateMask_ == null
          ? com.google.protobuf.FieldMask.getDefaultInstance()
          : updateMask_;
    } else {
      return updateMaskBuilder_.getMessage();
    }
  }

  /** Sets the update_mask; rejects null. */
  public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
    if (updateMaskBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      updateMask_ = value;
    } else {
      updateMaskBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /** Sets the update_mask from a sub-builder snapshot. */
  public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
    if (updateMaskBuilder_ == null) {
      updateMask_ = builderForValue.build();
    } else {
      updateMaskBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   * Merges {@code value} into the current update_mask; if no non-default
   * value is already set, this behaves like setUpdateMask(value).
   */
  public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
    if (updateMaskBuilder_ == null) {
      if (((bitField0_ & 0x00000001) != 0)
          && updateMask_ != null
          && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
        getUpdateMaskBuilder().mergeFrom(value);
      } else {
        updateMask_ = value;
      }
    } else {
      updateMaskBuilder_.mergeFrom(value);
    }
    if (updateMask_ != null) {
      bitField0_ |= 0x00000001;
      onChanged();
    }
    return this;
  }

  /** Clears the update_mask (presence bit, value, and sub-builder). */
  public Builder clearUpdateMask() {
    bitField0_ = (bitField0_ & ~0x00000001);
    updateMask_ = null;
    if (updateMaskBuilder_ != null) {
      updateMaskBuilder_.dispose();
      updateMaskBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /** Marks update_mask present and returns its mutable sub-builder. */
  public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
    bitField0_ |= 0x00000001;
    onChanged();
    return getUpdateMaskFieldBuilder().getBuilder();
  }

  /** Read-only view of update_mask without forcing sub-builder creation. */
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    if (updateMaskBuilder_ != null) {
      return updateMaskBuilder_.getMessageOrBuilder();
    } else {
      return updateMask_ == null
          ? com.google.protobuf.FieldMask.getDefaultInstance()
          : updateMask_;
    }
  }

  // Lazily creates the SingleFieldBuilderV3 for update_mask; once created,
  // the plain field reference is nulled out and the builder owns the value.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.FieldMask,
          com.google.protobuf.FieldMask.Builder,
          com.google.protobuf.FieldMaskOrBuilder>
      getUpdateMaskFieldBuilder() {
    if (updateMaskBuilder_ == null) {
      updateMaskBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.protobuf.FieldMask,
              com.google.protobuf.FieldMask.Builder,
              com.google.protobuf.FieldMaskOrBuilder>(
              getUpdateMask(), getParentForChildren(), isClean());
      updateMask_ = null;
    }
    return updateMaskBuilder_;
  }

  private com.google.cloud.networkservices.v1.GrpcRoute grpcRoute_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.networkservices.v1.GrpcRoute,
          com.google.cloud.networkservices.v1.GrpcRoute.Builder,
          com.google.cloud.networkservices.v1.GrpcRouteOrBuilder>
      grpcRouteBuilder_;

  /**
   * <pre>
   * Required. Updated GrpcRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.GrpcRoute grpc_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the grpcRoute field is set.
   */
  public boolean hasGrpcRoute() {
    return ((bitField0_ & 0x00000002) != 0);
  }

  /**
   * Returns the grpc_route, preferring the live sub-builder's message when
   * one exists; never returns null (default instance when unset).
   *
   * @return The grpcRoute.
   */
  public com.google.cloud.networkservices.v1.GrpcRoute getGrpcRoute() {
    if (grpcRouteBuilder_ == null) {
      return grpcRoute_ == null
          ? com.google.cloud.networkservices.v1.GrpcRoute.getDefaultInstance()
          : grpcRoute_;
    } else {
      return grpcRouteBuilder_.getMessage();
    }
  }

  /** Sets the grpc_route; rejects null. */
  public Builder setGrpcRoute(com.google.cloud.networkservices.v1.GrpcRoute value) {
    if (grpcRouteBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      grpcRoute_ = value;
    } else {
      grpcRouteBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /** Sets the grpc_route from a sub-builder snapshot. */
  public Builder setGrpcRoute(
      com.google.cloud.networkservices.v1.GrpcRoute.Builder builderForValue) {
    if (grpcRouteBuilder_ == null) {
      grpcRoute_ = builderForValue.build();
    } else {
      grpcRouteBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   * Merges {@code value} into the current grpc_route; if no non-default value
   * is already set, this behaves like setGrpcRoute(value).
   */
  public Builder mergeGrpcRoute(com.google.cloud.networkservices.v1.GrpcRoute value) {
    if (grpcRouteBuilder_ == null) {
      if (((bitField0_ & 0x00000002) != 0)
          && grpcRoute_ != null
          && grpcRoute_ != com.google.cloud.networkservices.v1.GrpcRoute.getDefaultInstance()) {
        getGrpcRouteBuilder().mergeFrom(value);
      } else {
        grpcRoute_ = value;
      }
    } else {
      grpcRouteBuilder_.mergeFrom(value);
    }
    if (grpcRoute_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }

  /** Clears the grpc_route (presence bit, value, and sub-builder). */
  public Builder clearGrpcRoute() {
    bitField0_ = (bitField0_ & ~0x00000002);
    grpcRoute_ = null;
    if (grpcRouteBuilder_ != null) {
      grpcRouteBuilder_.dispose();
      grpcRouteBuilder_ = null;
    }
    onChanged();
    return this;
  }

  /** Marks grpc_route present and returns its mutable sub-builder. */
  public com.google.cloud.networkservices.v1.GrpcRoute.Builder getGrpcRouteBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getGrpcRouteFieldBuilder().getBuilder();
  }

  /** Read-only view of grpc_route without forcing sub-builder creation. */
  public com.google.cloud.networkservices.v1.GrpcRouteOrBuilder getGrpcRouteOrBuilder() {
    if (grpcRouteBuilder_ != null) {
      return grpcRouteBuilder_.getMessageOrBuilder();
    } else {
      return grpcRoute_ == null
          ? com.google.cloud.networkservices.v1.GrpcRoute.getDefaultInstance()
          : grpcRoute_;
    }
  }

  // Lazily creates the SingleFieldBuilderV3 for grpc_route; once created,
  // the plain field reference is nulled out and the builder owns the value.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.networkservices.v1.GrpcRoute,
          com.google.cloud.networkservices.v1.GrpcRoute.Builder,
          com.google.cloud.networkservices.v1.GrpcRouteOrBuilder>
      getGrpcRouteFieldBuilder() {
    if (grpcRouteBuilder_ == null) {
      grpcRouteBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.networkservices.v1.GrpcRoute,
              com.google.cloud.networkservices.v1.GrpcRoute.Builder,
              com.google.cloud.networkservices.v1.GrpcRouteOrBuilder>(
              getGrpcRoute(), getParentForChildren(), isClean());
      grpcRoute_ = null;
    }
    return grpcRouteBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.networkservices.v1.UpdateGrpcRouteRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.networkservices.v1.UpdateGrpcRouteRequest)

// Shared immutable default instance and the singleton wire-format parser.
private static final com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest();
}

public static com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Parses by delegating to Builder.mergeFrom, re-wrapping any failure as
// InvalidProtocolBufferException with the partially built message attached.
private static final com.google.protobuf.Parser<UpdateGrpcRouteRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateGrpcRouteRequest>() {
      @java.lang.Override
      public UpdateGrpcRouteRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<UpdateGrpcRouteRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<UpdateGrpcRouteRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.networkservices.v1.UpdateGrpcRouteRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// ==== End of generated file: UpdateGrpcRouteRequest.java ====
// ==== Begin generated file (googleapis/google-cloud-java):
// java-networkservices/proto-google-cloud-networkservices-v1/src/main/java/com/google/cloud/networkservices/v1/UpdateHttpRouteRequest.java ====
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/networkservices/v1/http_route.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.networkservices.v1;
/**
*
*
* <pre>
* Request used by the UpdateHttpRoute method.
* </pre>
*
* Protobuf type {@code google.cloud.networkservices.v1.UpdateHttpRouteRequest}
*/
public final class UpdateHttpRouteRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.networkservices.v1.UpdateHttpRouteRequest)
UpdateHttpRouteRequestOrBuilder {
private static final long serialVersionUID = 0L;

// Use UpdateHttpRouteRequest.newBuilder() to construct.
private UpdateHttpRouteRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Private no-arg constructor used only for the default instance.
private UpdateHttpRouteRequest() {}

// Reflection hook used by the protobuf runtime to create empty instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new UpdateHttpRouteRequest();
}
// Descriptor plumbing linking this class to the message type declared in
// google/cloud/networkservices/v1/http_route.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.networkservices.v1.HttpRouteProto
      .internal_static_google_cloud_networkservices_v1_UpdateHttpRouteRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.networkservices.v1.HttpRouteProto
      .internal_static_google_cloud_networkservices_v1_UpdateHttpRouteRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.networkservices.v1.UpdateHttpRouteRequest.class,
          com.google.cloud.networkservices.v1.UpdateHttpRouteRequest.Builder.class);
}
// Presence bits: bit 0 = update_mask, bit 1 = http_route.
private int bitField0_;

public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;

/**
 * <pre>
 * Optional. Field mask is used to specify the fields to be overwritten in
 * the HttpRoute resource by the update; paths are relative to the resource.
 * If the user does not provide a mask then all fields will be overwritten.
 * </pre>
 *
 * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
 * </code>
 *
 * @return Whether the updateMask field is set.
 */
@java.lang.Override
public boolean hasUpdateMask() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 * Returns the update_mask; never null (default instance when unset).
 *
 * @return The updateMask.
 */
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}

/** Read-only view of update_mask; identical to getUpdateMask() on messages. */
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
  return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int HTTP_ROUTE_FIELD_NUMBER = 2;
private com.google.cloud.networkservices.v1.HttpRoute httpRoute_;

/**
 * <pre>
 * Required. Updated HttpRoute resource.
 * </pre>
 *
 * <code>
 * .google.cloud.networkservices.v1.HttpRoute http_route = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the httpRoute field is set.
 */
@java.lang.Override
public boolean hasHttpRoute() {
  return ((bitField0_ & 0x00000002) != 0);
}

/**
 * Returns the http_route; never null (default instance when unset).
 *
 * @return The httpRoute.
 */
@java.lang.Override
public com.google.cloud.networkservices.v1.HttpRoute getHttpRoute() {
  return httpRoute_ == null
      ? com.google.cloud.networkservices.v1.HttpRoute.getDefaultInstance()
      : httpRoute_;
}

/** Read-only view of http_route; identical to getHttpRoute() on messages. */
@java.lang.Override
public com.google.cloud.networkservices.v1.HttpRouteOrBuilder getHttpRouteOrBuilder() {
  return httpRoute_ == null
      ? com.google.cloud.networkservices.v1.HttpRoute.getDefaultInstance()
      : httpRoute_;
}
// Memoized initialization state: -1 unknown, 0 false, 1 true. Proto3 has no
// required wire fields, so this always resolves to true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}
// Serializes only the fields whose presence bit is set, then any unknown
// fields, in field-number order (1 = update_mask, 2 = http_route).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(1, getUpdateMask());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    output.writeMessage(2, getHttpRoute());
  }
  getUnknownFields().writeTo(output);
}
// Computes (and memoizes in memoizedSize; -1 means "not computed") the exact
// byte size writeTo() will emit.
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
  }
  if (((bitField0_ & 0x00000002) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getHttpRoute());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over both message fields and the unknown-field set.
// Generated by protoc; must stay consistent with hashCode() below.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.networkservices.v1.UpdateHttpRouteRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.networkservices.v1.UpdateHttpRouteRequest other =
      (com.google.cloud.networkservices.v1.UpdateHttpRouteRequest) obj;
  // Presence (hasX) must match before the field values are compared.
  if (hasUpdateMask() != other.hasUpdateMask()) return false;
  if (hasUpdateMask()) {
    if (!getUpdateMask().equals(other.getUpdateMask())) return false;
  }
  if (hasHttpRoute() != other.hasHttpRoute()) return false;
  if (hasHttpRoute()) {
    if (!getHttpRoute().equals(other.getHttpRoute())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
// Hash consistent with equals(): mixes the descriptor, each *present* field
// (tagged by its proto field number), and the unknown fields. Memoized in
// memoizedHashCode because the message is immutable; 0 means "not computed".
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  if (hasUpdateMask()) {
    hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
    hash = (53 * hash) + getUpdateMask().hashCode();
  }
  if (hasHttpRoute()) {
    hash = (37 * hash) + HTTP_ROUTE_FIELD_NUMBER;
    hash = (53 * hash) + getHttpRoute().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---------------------------------------------------------------------------
// Standard protoc-generated parse entry points. All overloads delegate to the
// singleton PARSER (or to the GeneratedMessageV3 IOException-wrapping helpers
// for stream inputs). The *delimited* variants read a varint length prefix
// before the message bytes.
// ---------------------------------------------------------------------------
public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Builder factory methods. toBuilder() avoids copying when invoked on the
// shared default instance by returning a fresh empty Builder instead.
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.networkservices.v1.UpdateHttpRouteRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 *
 *
 * <pre>
 * Request used by the UpdateHttpRoute method.
 * </pre>
 *
 * Protobuf type {@code google.cloud.networkservices.v1.UpdateHttpRouteRequest}
 *
 * <p>Generated mutable builder for the immutable request message. Field
 * presence is tracked in {@code bitField0_} (bit 0 = update_mask,
 * bit 1 = http_route); each message field is held either directly
 * ({@code updateMask_}/{@code httpRoute_}) or via a lazily created
 * SingleFieldBuilderV3 — never both at once.
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.networkservices.v1.UpdateHttpRouteRequest)
    com.google.cloud.networkservices.v1.UpdateHttpRouteRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.networkservices.v1.HttpRouteProto
        .internal_static_google_cloud_networkservices_v1_UpdateHttpRouteRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.networkservices.v1.HttpRouteProto
        .internal_static_google_cloud_networkservices_v1_UpdateHttpRouteRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.networkservices.v1.UpdateHttpRouteRequest.class,
            com.google.cloud.networkservices.v1.UpdateHttpRouteRequest.Builder.class);
  }

  // Construct using com.google.cloud.networkservices.v1.UpdateHttpRouteRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  private void maybeForceBuilderInitialization() {
    // Eagerly create nested field builders only when the runtime requires it
    // (alwaysUseFieldBuilders is set for nested-builder support).
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getUpdateMaskFieldBuilder();
      getHttpRouteFieldBuilder();
    }
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    updateMask_ = null;
    if (updateMaskBuilder_ != null) {
      updateMaskBuilder_.dispose();
      updateMaskBuilder_ = null;
    }
    httpRoute_ = null;
    if (httpRouteBuilder_ != null) {
      httpRouteBuilder_.dispose();
      httpRouteBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.networkservices.v1.HttpRouteProto
        .internal_static_google_cloud_networkservices_v1_UpdateHttpRouteRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.networkservices.v1.UpdateHttpRouteRequest getDefaultInstanceForType() {
    return com.google.cloud.networkservices.v1.UpdateHttpRouteRequest.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.networkservices.v1.UpdateHttpRouteRequest build() {
    com.google.cloud.networkservices.v1.UpdateHttpRouteRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.networkservices.v1.UpdateHttpRouteRequest buildPartial() {
    com.google.cloud.networkservices.v1.UpdateHttpRouteRequest result =
        new com.google.cloud.networkservices.v1.UpdateHttpRouteRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies set fields (and their presence bits) from this builder into result.
  private void buildPartial0(com.google.cloud.networkservices.v1.UpdateHttpRouteRequest result) {
    int from_bitField0_ = bitField0_;
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.httpRoute_ = httpRouteBuilder_ == null ? httpRoute_ : httpRouteBuilder_.build();
      to_bitField0_ |= 0x00000002;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    // Dispatch to the typed merge when possible; otherwise fall back to the
    // reflective descriptor-based merge in the superclass.
    if (other instanceof com.google.cloud.networkservices.v1.UpdateHttpRouteRequest) {
      return mergeFrom((com.google.cloud.networkservices.v1.UpdateHttpRouteRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(com.google.cloud.networkservices.v1.UpdateHttpRouteRequest other) {
    if (other == com.google.cloud.networkservices.v1.UpdateHttpRouteRequest.getDefaultInstance())
      return this;
    if (other.hasUpdateMask()) {
      mergeUpdateMask(other.getUpdateMask());
    }
    if (other.hasHttpRoute()) {
      mergeHttpRoute(other.getHttpRoute());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    // No proto2 required fields, so any state is structurally initialized.
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        // Wire tags: 10 = field 1 (update_mask, length-delimited),
        // 18 = field 2 (http_route, length-delimited).
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10:
            {
              input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 18:
            {
              input.readMessage(getHttpRouteFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  private int bitField0_;

  private com.google.protobuf.FieldMask updateMask_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.FieldMask,
          com.google.protobuf.FieldMask.Builder,
          com.google.protobuf.FieldMaskOrBuilder>
      updateMaskBuilder_;
  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * HttpRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return Whether the updateMask field is set.
   */
  public boolean hasUpdateMask() {
    return ((bitField0_ & 0x00000001) != 0);
  }
  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * HttpRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   *
   * @return The updateMask.
   */
  public com.google.protobuf.FieldMask getUpdateMask() {
    if (updateMaskBuilder_ == null) {
      return updateMask_ == null
          ? com.google.protobuf.FieldMask.getDefaultInstance()
          : updateMask_;
    } else {
      return updateMaskBuilder_.getMessage();
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * HttpRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
    if (updateMaskBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      updateMask_ = value;
    } else {
      updateMaskBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * HttpRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
    if (updateMaskBuilder_ == null) {
      updateMask_ = builderForValue.build();
    } else {
      updateMaskBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * HttpRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
    if (updateMaskBuilder_ == null) {
      // Merge into an existing non-default value; otherwise adopt the new one.
      if (((bitField0_ & 0x00000001) != 0)
          && updateMask_ != null
          && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
        getUpdateMaskBuilder().mergeFrom(value);
      } else {
        updateMask_ = value;
      }
    } else {
      updateMaskBuilder_.mergeFrom(value);
    }
    if (updateMask_ != null) {
      bitField0_ |= 0x00000001;
      onChanged();
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * HttpRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public Builder clearUpdateMask() {
    bitField0_ = (bitField0_ & ~0x00000001);
    updateMask_ = null;
    if (updateMaskBuilder_ != null) {
      updateMaskBuilder_.dispose();
      updateMaskBuilder_ = null;
    }
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * HttpRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
    bitField0_ |= 0x00000001;
    onChanged();
    return getUpdateMaskFieldBuilder().getBuilder();
  }
  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * HttpRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
    if (updateMaskBuilder_ != null) {
      return updateMaskBuilder_.getMessageOrBuilder();
    } else {
      return updateMask_ == null
          ? com.google.protobuf.FieldMask.getDefaultInstance()
          : updateMask_;
    }
  }
  /**
   *
   *
   * <pre>
   * Optional. Field mask is used to specify the fields to be overwritten in the
   * HttpRoute resource by the update.
   * The fields specified in the update_mask are relative to the resource, not
   * the full request. A field will be overwritten if it is in the mask. If the
   * user does not provide a mask then all fields will be overwritten.
   * </pre>
   *
   * <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = OPTIONAL];
   * </code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.FieldMask,
          com.google.protobuf.FieldMask.Builder,
          com.google.protobuf.FieldMaskOrBuilder>
      getUpdateMaskFieldBuilder() {
    // Lazily transfer ownership of the field from updateMask_ to the nested builder.
    if (updateMaskBuilder_ == null) {
      updateMaskBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.protobuf.FieldMask,
              com.google.protobuf.FieldMask.Builder,
              com.google.protobuf.FieldMaskOrBuilder>(
              getUpdateMask(), getParentForChildren(), isClean());
      updateMask_ = null;
    }
    return updateMaskBuilder_;
  }

  private com.google.cloud.networkservices.v1.HttpRoute httpRoute_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.networkservices.v1.HttpRoute,
          com.google.cloud.networkservices.v1.HttpRoute.Builder,
          com.google.cloud.networkservices.v1.HttpRouteOrBuilder>
      httpRouteBuilder_;
  /**
   *
   *
   * <pre>
   * Required. Updated HttpRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.HttpRoute http_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return Whether the httpRoute field is set.
   */
  public boolean hasHttpRoute() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   *
   *
   * <pre>
   * Required. Updated HttpRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.HttpRoute http_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   *
   * @return The httpRoute.
   */
  public com.google.cloud.networkservices.v1.HttpRoute getHttpRoute() {
    if (httpRouteBuilder_ == null) {
      return httpRoute_ == null
          ? com.google.cloud.networkservices.v1.HttpRoute.getDefaultInstance()
          : httpRoute_;
    } else {
      return httpRouteBuilder_.getMessage();
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Updated HttpRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.HttpRoute http_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setHttpRoute(com.google.cloud.networkservices.v1.HttpRoute value) {
    if (httpRouteBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      httpRoute_ = value;
    } else {
      httpRouteBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. Updated HttpRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.HttpRoute http_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder setHttpRoute(
      com.google.cloud.networkservices.v1.HttpRoute.Builder builderForValue) {
    if (httpRouteBuilder_ == null) {
      httpRoute_ = builderForValue.build();
    } else {
      httpRouteBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. Updated HttpRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.HttpRoute http_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder mergeHttpRoute(com.google.cloud.networkservices.v1.HttpRoute value) {
    if (httpRouteBuilder_ == null) {
      // Merge into an existing non-default value; otherwise adopt the new one.
      if (((bitField0_ & 0x00000002) != 0)
          && httpRoute_ != null
          && httpRoute_ != com.google.cloud.networkservices.v1.HttpRoute.getDefaultInstance()) {
        getHttpRouteBuilder().mergeFrom(value);
      } else {
        httpRoute_ = value;
      }
    } else {
      httpRouteBuilder_.mergeFrom(value);
    }
    if (httpRoute_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. Updated HttpRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.HttpRoute http_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public Builder clearHttpRoute() {
    bitField0_ = (bitField0_ & ~0x00000002);
    httpRoute_ = null;
    if (httpRouteBuilder_ != null) {
      httpRouteBuilder_.dispose();
      httpRouteBuilder_ = null;
    }
    onChanged();
    return this;
  }
  /**
   *
   *
   * <pre>
   * Required. Updated HttpRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.HttpRoute http_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.networkservices.v1.HttpRoute.Builder getHttpRouteBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getHttpRouteFieldBuilder().getBuilder();
  }
  /**
   *
   *
   * <pre>
   * Required. Updated HttpRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.HttpRoute http_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  public com.google.cloud.networkservices.v1.HttpRouteOrBuilder getHttpRouteOrBuilder() {
    if (httpRouteBuilder_ != null) {
      return httpRouteBuilder_.getMessageOrBuilder();
    } else {
      return httpRoute_ == null
          ? com.google.cloud.networkservices.v1.HttpRoute.getDefaultInstance()
          : httpRoute_;
    }
  }
  /**
   *
   *
   * <pre>
   * Required. Updated HttpRoute resource.
   * </pre>
   *
   * <code>
   * .google.cloud.networkservices.v1.HttpRoute http_route = 2 [(.google.api.field_behavior) = REQUIRED];
   * </code>
   */
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.cloud.networkservices.v1.HttpRoute,
          com.google.cloud.networkservices.v1.HttpRoute.Builder,
          com.google.cloud.networkservices.v1.HttpRouteOrBuilder>
      getHttpRouteFieldBuilder() {
    // Lazily transfer ownership of the field from httpRoute_ to the nested builder.
    if (httpRouteBuilder_ == null) {
      httpRouteBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.cloud.networkservices.v1.HttpRoute,
              com.google.cloud.networkservices.v1.HttpRoute.Builder,
              com.google.cloud.networkservices.v1.HttpRouteOrBuilder>(
              getHttpRoute(), getParentForChildren(), isClean());
      httpRoute_ = null;
    }
    return httpRouteBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.networkservices.v1.UpdateHttpRouteRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.networkservices.v1.UpdateHttpRouteRequest)
// Shared singleton default instance, created eagerly at class load.
private static final com.google.cloud.networkservices.v1.UpdateHttpRouteRequest DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.networkservices.v1.UpdateHttpRouteRequest();
}

public static com.google.cloud.networkservices.v1.UpdateHttpRouteRequest getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Singleton parser; parsePartialFrom funnels every stream/byte parse through a
// Builder, attaching the partially built message to any parse failure.
private static final com.google.protobuf.Parser<UpdateHttpRouteRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateHttpRouteRequest>() {
      @java.lang.Override
      public UpdateHttpRouteRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<UpdateHttpRouteRequest> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<UpdateHttpRouteRequest> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.networkservices.v1.UpdateHttpRouteRequest getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/photos/library/v1/photos_library.proto
package com.google.photos.library.v1.proto;
/**
*
*
* <pre>
* List of media items that match the search parameters.
* </pre>
*
* Protobuf type {@code google.photos.library.v1.SearchMediaItemsResponse}
*/
public final class SearchMediaItemsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.photos.library.v1.SearchMediaItemsResponse)
SearchMediaItemsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchMediaItemsResponse.newBuilder() to construct.
private SearchMediaItemsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private SearchMediaItemsResponse() {
mediaItems_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
// Reflection hook used by the protobuf runtime to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new SearchMediaItemsResponse();
}

@java.lang.Override
public final com.google.protobuf.UnknownFieldSet getUnknownFields() {
  return this.unknownFields;
}
// Descriptor plumbing generated from photos_library.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.photos.library.v1.proto.LibraryServiceProto
      .internal_static_google_photos_library_v1_SearchMediaItemsResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.photos.library.v1.proto.LibraryServiceProto
      .internal_static_google_photos_library_v1_SearchMediaItemsResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.photos.library.v1.proto.SearchMediaItemsResponse.class,
          com.google.photos.library.v1.proto.SearchMediaItemsResponse.Builder.class);
}
public static final int MEDIA_ITEMS_FIELD_NUMBER = 1;

// Backing list for repeated field 1; the accessors below expose it read-only.
@SuppressWarnings("serial")
private java.util.List<com.google.photos.types.proto.MediaItem> mediaItems_;
/**
 *
 *
 * <pre>
 * Output only. List of media items that match the search parameters.
 * </pre>
 *
 * <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.photos.types.proto.MediaItem> getMediaItemsList() {
  return mediaItems_;
}
/**
 *
 *
 * <pre>
 * Output only. List of media items that match the search parameters.
 * </pre>
 *
 * <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.photos.types.proto.MediaItemOrBuilder>
    getMediaItemsOrBuilderList() {
  return mediaItems_;
}
/**
 *
 *
 * <pre>
 * Output only. List of media items that match the search parameters.
 * </pre>
 *
 * <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
 */
@java.lang.Override
public int getMediaItemsCount() {
  return mediaItems_.size();
}
/**
 *
 *
 * <pre>
 * Output only. List of media items that match the search parameters.
 * </pre>
 *
 * <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
 */
@java.lang.Override
public com.google.photos.types.proto.MediaItem getMediaItems(int index) {
  return mediaItems_.get(index);
}
/**
 *
 *
 * <pre>
 * Output only. List of media items that match the search parameters.
 * </pre>
 *
 * <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
 */
@java.lang.Override
public com.google.photos.types.proto.MediaItemOrBuilder getMediaItemsOrBuilder(int index) {
  return mediaItems_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

// Holds either a String or a ByteString; lazily converted and cached in
// whichever direction the accessors below are first called (standard
// generated-code string caching, hence volatile).
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
 *
 *
 * <pre>
 * Output only. Use this token to get the next set of media items. Its
 * presence is the only reliable indicator of more media items being available
 * in the next request.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}
/**
 *
 *
 * <pre>
 * Output only. Use this token to get the next set of media items. Its
 * presence is the only reliable indicator of more media items being available
 * in the next request.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
// Memoized tri-state: -1 unknown, 0 false, 1 true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // No required fields in this message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Serialize in field-number order: repeated media_items (1), then
  // next_page_token (2) only when non-empty, then any unknown fields.
  for (int i = 0; i < mediaItems_.size(); i++) {
    output.writeMessage(1, mediaItems_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Memoized; -1 marks "not yet computed". Must mirror writeTo() exactly.
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  for (int i = 0; i < mediaItems_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, mediaItems_.get(i));
  }
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  // Generated value equality over media_items, next_page_token and unknown fields.
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.photos.library.v1.proto.SearchMediaItemsResponse)) {
    return super.equals(obj);
  }
  com.google.photos.library.v1.proto.SearchMediaItemsResponse other =
      (com.google.photos.library.v1.proto.SearchMediaItemsResponse) obj;

  if (!getMediaItemsList().equals(other.getMediaItemsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized; 0 is the "not yet computed" sentinel.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Repeated field contributes only when non-empty, consistent with equals().
  if (getMediaItemsCount() > 0) {
    hash = (37 * hash) + MEDIA_ITEMS_FIELD_NUMBER;
    hash = (53 * hash) + getMediaItemsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parsing entry points. Each overload delegates to the
// singleton PARSER (byte[]/ByteString/ByteBuffer variants) or to the
// GeneratedMessageV3 stream helpers, which translate IOExceptions consistently.
public static com.google.photos.library.v1.proto.SearchMediaItemsResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.photos.library.v1.proto.SearchMediaItemsResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.photos.library.v1.proto.SearchMediaItemsResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.photos.library.v1.proto.SearchMediaItemsResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.photos.library.v1.proto.SearchMediaItemsResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.photos.library.v1.proto.SearchMediaItemsResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.photos.library.v1.proto.SearchMediaItemsResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.photos.library.v1.proto.SearchMediaItemsResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// Delimited variants read a varint length prefix before the message payload.
public static com.google.photos.library.v1.proto.SearchMediaItemsResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.photos.library.v1.proto.SearchMediaItemsResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.photos.library.v1.proto.SearchMediaItemsResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.photos.library.v1.proto.SearchMediaItemsResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.photos.library.v1.proto.SearchMediaItemsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* List of media items that match the search parameters.
* </pre>
*
* Protobuf type {@code google.photos.library.v1.SearchMediaItemsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.photos.library.v1.SearchMediaItemsResponse)
com.google.photos.library.v1.proto.SearchMediaItemsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_SearchMediaItemsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_SearchMediaItemsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.photos.library.v1.proto.SearchMediaItemsResponse.class,
com.google.photos.library.v1.proto.SearchMediaItemsResponse.Builder.class);
}
// Construct using com.google.photos.library.v1.proto.SearchMediaItemsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (mediaItemsBuilder_ == null) {
mediaItems_ = java.util.Collections.emptyList();
} else {
mediaItems_ = null;
mediaItemsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.photos.library.v1.proto.LibraryServiceProto
.internal_static_google_photos_library_v1_SearchMediaItemsResponse_descriptor;
}
@java.lang.Override
public com.google.photos.library.v1.proto.SearchMediaItemsResponse getDefaultInstanceForType() {
return com.google.photos.library.v1.proto.SearchMediaItemsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.photos.library.v1.proto.SearchMediaItemsResponse build() {
com.google.photos.library.v1.proto.SearchMediaItemsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.photos.library.v1.proto.SearchMediaItemsResponse buildPartial() {
com.google.photos.library.v1.proto.SearchMediaItemsResponse result =
new com.google.photos.library.v1.proto.SearchMediaItemsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.photos.library.v1.proto.SearchMediaItemsResponse result) {
if (mediaItemsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
mediaItems_ = java.util.Collections.unmodifiableList(mediaItems_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.mediaItems_ = mediaItems_;
} else {
result.mediaItems_ = mediaItemsBuilder_.build();
}
}
private void buildPartial0(com.google.photos.library.v1.proto.SearchMediaItemsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.photos.library.v1.proto.SearchMediaItemsResponse) {
return mergeFrom((com.google.photos.library.v1.proto.SearchMediaItemsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.photos.library.v1.proto.SearchMediaItemsResponse other) {
if (other == com.google.photos.library.v1.proto.SearchMediaItemsResponse.getDefaultInstance())
return this;
if (mediaItemsBuilder_ == null) {
if (!other.mediaItems_.isEmpty()) {
if (mediaItems_.isEmpty()) {
mediaItems_ = other.mediaItems_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureMediaItemsIsMutable();
mediaItems_.addAll(other.mediaItems_);
}
onChanged();
}
} else {
if (!other.mediaItems_.isEmpty()) {
if (mediaItemsBuilder_.isEmpty()) {
mediaItemsBuilder_.dispose();
mediaItemsBuilder_ = null;
mediaItems_ = other.mediaItems_;
bitField0_ = (bitField0_ & ~0x00000001);
mediaItemsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getMediaItemsFieldBuilder()
: null;
} else {
mediaItemsBuilder_.addAllMessages(other.mediaItems_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.photos.types.proto.MediaItem m =
input.readMessage(
com.google.photos.types.proto.MediaItem.parser(), extensionRegistry);
if (mediaItemsBuilder_ == null) {
ensureMediaItemsIsMutable();
mediaItems_.add(m);
} else {
mediaItemsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.photos.types.proto.MediaItem> mediaItems_ =
java.util.Collections.emptyList();
private void ensureMediaItemsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
mediaItems_ = new java.util.ArrayList<com.google.photos.types.proto.MediaItem>(mediaItems_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.photos.types.proto.MediaItem,
com.google.photos.types.proto.MediaItem.Builder,
com.google.photos.types.proto.MediaItemOrBuilder>
mediaItemsBuilder_;
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public java.util.List<com.google.photos.types.proto.MediaItem> getMediaItemsList() {
if (mediaItemsBuilder_ == null) {
return java.util.Collections.unmodifiableList(mediaItems_);
} else {
return mediaItemsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public int getMediaItemsCount() {
if (mediaItemsBuilder_ == null) {
return mediaItems_.size();
} else {
return mediaItemsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public com.google.photos.types.proto.MediaItem getMediaItems(int index) {
if (mediaItemsBuilder_ == null) {
return mediaItems_.get(index);
} else {
return mediaItemsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public Builder setMediaItems(int index, com.google.photos.types.proto.MediaItem value) {
if (mediaItemsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMediaItemsIsMutable();
mediaItems_.set(index, value);
onChanged();
} else {
mediaItemsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public Builder setMediaItems(
int index, com.google.photos.types.proto.MediaItem.Builder builderForValue) {
if (mediaItemsBuilder_ == null) {
ensureMediaItemsIsMutable();
mediaItems_.set(index, builderForValue.build());
onChanged();
} else {
mediaItemsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public Builder addMediaItems(com.google.photos.types.proto.MediaItem value) {
if (mediaItemsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMediaItemsIsMutable();
mediaItems_.add(value);
onChanged();
} else {
mediaItemsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public Builder addMediaItems(int index, com.google.photos.types.proto.MediaItem value) {
if (mediaItemsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureMediaItemsIsMutable();
mediaItems_.add(index, value);
onChanged();
} else {
mediaItemsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public Builder addMediaItems(com.google.photos.types.proto.MediaItem.Builder builderForValue) {
if (mediaItemsBuilder_ == null) {
ensureMediaItemsIsMutable();
mediaItems_.add(builderForValue.build());
onChanged();
} else {
mediaItemsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public Builder addMediaItems(
int index, com.google.photos.types.proto.MediaItem.Builder builderForValue) {
if (mediaItemsBuilder_ == null) {
ensureMediaItemsIsMutable();
mediaItems_.add(index, builderForValue.build());
onChanged();
} else {
mediaItemsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public Builder addAllMediaItems(
java.lang.Iterable<? extends com.google.photos.types.proto.MediaItem> values) {
if (mediaItemsBuilder_ == null) {
ensureMediaItemsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, mediaItems_);
onChanged();
} else {
mediaItemsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public Builder clearMediaItems() {
if (mediaItemsBuilder_ == null) {
mediaItems_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
mediaItemsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public Builder removeMediaItems(int index) {
if (mediaItemsBuilder_ == null) {
ensureMediaItemsIsMutable();
mediaItems_.remove(index);
onChanged();
} else {
mediaItemsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public com.google.photos.types.proto.MediaItem.Builder getMediaItemsBuilder(int index) {
return getMediaItemsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public com.google.photos.types.proto.MediaItemOrBuilder getMediaItemsOrBuilder(int index) {
if (mediaItemsBuilder_ == null) {
return mediaItems_.get(index);
} else {
return mediaItemsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public java.util.List<? extends com.google.photos.types.proto.MediaItemOrBuilder>
getMediaItemsOrBuilderList() {
if (mediaItemsBuilder_ != null) {
return mediaItemsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(mediaItems_);
}
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public com.google.photos.types.proto.MediaItem.Builder addMediaItemsBuilder() {
return getMediaItemsFieldBuilder()
.addBuilder(com.google.photos.types.proto.MediaItem.getDefaultInstance());
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public com.google.photos.types.proto.MediaItem.Builder addMediaItemsBuilder(int index) {
return getMediaItemsFieldBuilder()
.addBuilder(index, com.google.photos.types.proto.MediaItem.getDefaultInstance());
}
/**
*
*
* <pre>
* Output only. List of media items that match the search parameters.
* </pre>
*
* <code>repeated .google.photos.types.MediaItem media_items = 1;</code>
*/
public java.util.List<com.google.photos.types.proto.MediaItem.Builder>
getMediaItemsBuilderList() {
return getMediaItemsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.photos.types.proto.MediaItem,
com.google.photos.types.proto.MediaItem.Builder,
com.google.photos.types.proto.MediaItemOrBuilder>
getMediaItemsFieldBuilder() {
if (mediaItemsBuilder_ == null) {
mediaItemsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.photos.types.proto.MediaItem,
com.google.photos.types.proto.MediaItem.Builder,
com.google.photos.types.proto.MediaItemOrBuilder>(
mediaItems_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
mediaItems_ = null;
}
return mediaItemsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Output only. Use this token to get the next set of media items. Its
* presence is the only reliable indicator of more media items being available
* in the next request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Output only. Use this token to get the next set of media items. Its
* presence is the only reliable indicator of more media items being available
* in the next request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Output only. Use this token to get the next set of media items. Its
* presence is the only reliable indicator of more media items being available
* in the next request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Use this token to get the next set of media items. Its
* presence is the only reliable indicator of more media items being available
* in the next request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Output only. Use this token to get the next set of media items. Its
* presence is the only reliable indicator of more media items being available
* in the next request.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.photos.library.v1.SearchMediaItemsResponse)
}
// @@protoc_insertion_point(class_scope:google.photos.library.v1.SearchMediaItemsResponse)
private static final com.google.photos.library.v1.proto.SearchMediaItemsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.photos.library.v1.proto.SearchMediaItemsResponse();
}
public static com.google.photos.library.v1.proto.SearchMediaItemsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SearchMediaItemsResponse> PARSER =
new com.google.protobuf.AbstractParser<SearchMediaItemsResponse>() {
@java.lang.Override
public SearchMediaItemsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SearchMediaItemsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SearchMediaItemsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.photos.library.v1.proto.SearchMediaItemsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
google/libphonenumber | 37,248 | metadata/src/main/java/com/google/i18n/phonenumbers/metadata/model/NumberingScheme.java | /*
* Copyright (C) 2017 The Libphonenumber Authors.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.i18n.phonenumbers.metadata.model;
import static com.google.common.base.Preconditions.checkArgument;
import static com.google.common.base.Preconditions.checkState;
import static com.google.common.collect.ImmutableList.toImmutableList;
import static com.google.i18n.phonenumbers.metadata.model.MetadataException.checkMetadata;
import static com.google.i18n.phonenumbers.metadata.model.XmlRangesSchema.AREA_CODE_LENGTH;
import static com.google.i18n.phonenumbers.metadata.model.XmlRangesSchema.FORMAT;
import static com.google.i18n.phonenumbers.metadata.model.XmlRangesSchema.NATIONAL_ONLY;
import static com.google.i18n.phonenumbers.metadata.model.XmlRangesSchema.PER_REGION_COLUMNS;
import static com.google.i18n.phonenumbers.metadata.model.XmlRangesSchema.REGIONS;
import static java.lang.Boolean.TRUE;
import static java.util.Comparator.comparing;
import com.google.auto.value.AutoValue;
import com.google.common.base.Joiner;
import com.google.common.base.Splitter;
import com.google.common.collect.ContiguousSet;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.ImmutableSortedMap;
import com.google.common.collect.ImmutableSortedSet;
import com.google.common.collect.ImmutableTable;
import com.google.common.collect.Ordering;
import com.google.common.collect.Sets;
import com.google.common.collect.Table;
import com.google.i18n.phonenumbers.metadata.DigitSequence;
import com.google.i18n.phonenumbers.metadata.PrefixTree;
import com.google.i18n.phonenumbers.metadata.RangeSpecification;
import com.google.i18n.phonenumbers.metadata.RangeTree;
import com.google.i18n.phonenumbers.metadata.i18n.PhoneRegion;
import com.google.i18n.phonenumbers.metadata.model.FormatSpec.FormatTemplate;
import com.google.i18n.phonenumbers.metadata.model.NumberingScheme.Comment.Anchor;
import com.google.i18n.phonenumbers.metadata.proto.Types.ValidNumberType;
import com.google.i18n.phonenumbers.metadata.proto.Types.XmlNumberType;
import com.google.i18n.phonenumbers.metadata.proto.Types.XmlShortcodeType;
import com.google.i18n.phonenumbers.metadata.table.RangeTable;
import com.google.i18n.phonenumbers.metadata.table.RangeTable.OverwriteMode;
import com.google.i18n.phonenumbers.metadata.table.Schema;
import java.util.ArrayList;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Optional;
import java.util.Set;
import java.util.TreeSet;
/**
* An abstraction of all the phone number metadata known about for a single calling code.
* <p>
* Note that there is no builder for NumberingScheme. The expectation is that CSV tables and other
* primary sources will be used to build numbering schemes at a single point in the business logic.
* Handling incremental modification of a builder, or partially built schemes just isn't something
* that's expected to be needed (though there is {@code TestNumberingScheme} for use in unit tests.
*/
@AutoValue
public abstract class NumberingScheme {
// Bitmask for [1-9] (bits 1..9 set, bit 0 clear).
private static final int NOT_ZERO_MASK = 0x3FE;
private static final String JAPAN_COUNTRY_CODE = "81";
/** Top level information about a numbering scheme. */
@AutoValue
public abstract static class Attributes {
/** Returns a new attributes instance for the given data. */
public static Attributes create(
DigitSequence cc,
PhoneRegion mainRegion,
Set<PhoneRegion> extraRegions,
ImmutableSet<DigitSequence> nationalPrefix,
RangeTree carrierPrefixes,
String defaultIddPrefix,
RangeTree allIddRanges,
String extensionPrefix,
Set<PhoneRegion> mobilePortableRegions) {
// In theory there could be IDD prefix for a non-geographic region (and this check could be
// removed) but it's not something we've ever seen and don't have any expectation of.
checkMetadata(!mainRegion.equals(PhoneRegion.getWorld()) || allIddRanges.isEmpty(),
"[%s] IDD prefixes must not be present for non-geographic regions", cc);
checkMetadata(mainRegion.equals(PhoneRegion.getWorld()) || !allIddRanges.isEmpty(),
"[%s] IDD prefixes must be present for all geographic regions", cc);
checkMetadata(nationalPrefix.stream().noneMatch(allIddRanges::contains),
"[%s] National prefix %s and IDD prefixes (%s) must be disjoint",
cc, nationalPrefix, allIddRanges);
checkMetadata(nationalPrefix.stream().noneMatch(carrierPrefixes::contains),
"[%s] National prefix %s and carrier prefixes (%s) must be disjoint",
cc, nationalPrefix, carrierPrefixes);
// Allow exactly one '~' to separate the prefix digits to indicate a pause during dialling
// (this check could be relaxed in future, but it's currently true for all data).
checkMetadata(defaultIddPrefix.isEmpty() || defaultIddPrefix.matches("[0-9]+(?:~[0-9]+)?"),
"[%s] Invalid IDD prefix: %s", cc, defaultIddPrefix);
DigitSequence iddPrefix = DigitSequence.of(defaultIddPrefix.replace("~", ""));
checkMetadata(iddPrefix.isEmpty() || allIddRanges.contains(iddPrefix),
"[%s] IDD ranges must contain the default prefix: %s", cc, iddPrefix);
checkMetadata(!extraRegions.contains(mainRegion),
"[%s] duplicated main region '%s' in extra regions: %s",
cc, mainRegion, extraRegions);
// Main region comes first in iteration order, remaining regions are ordered naturally.
ImmutableSet.Builder<PhoneRegion> set = ImmutableSet.builder();
set.add(mainRegion);
extraRegions.stream().sorted().forEach(set::add);
ImmutableSet<PhoneRegion> allRegions = set.build();
checkMetadata(allRegions.containsAll(mobilePortableRegions),
"invalid mobile portable regions: %s", mobilePortableRegions);
return new AutoValue_NumberingScheme_Attributes(
cc,
allRegions,
nationalPrefix,
carrierPrefixes,
defaultIddPrefix,
allIddRanges,
!extensionPrefix.isEmpty() ? Optional.of(extensionPrefix) : Optional.empty(),
ImmutableSortedSet.copyOf(Ordering.natural(), mobilePortableRegions));
}
/** Returns the unique calling code of this numbering scheme. */
public abstract DigitSequence getCallingCode();
/**
* Returns the regions represented by this numbering scheme. The main region is always present
* and listed first, and remaining regions are listed in "natural" order.
*/
public abstract ImmutableSet<PhoneRegion> getRegions();
/**
* Returns the "main" region for this numbering scheme. The notion of a main region for a
* country calling code is slightly archaic and mostly comes from the way in which the legacy
* XML data is structured. However there are a few places in the public API where the "main"
* region is returned in situations of ambiguity, so it can be useful to know it.
*/
public final PhoneRegion getMainRegion() {
return getRegions().asList().get(0);
}
/**
* Returns all possible national prefixes which can be used when dialling national numbers. In
* most cases this set just contains the preferred prefix, but alternate values may be present
* when a region switches between prefixes or for other reasons. Any "non preferred" prefixes
* are recognized only during parsing, and otherwise ignored.
*
* <p>If there is a preferred prefix, it is listed first, otherwise the set is empty.
*/
public abstract ImmutableSet<DigitSequence> getNationalPrefixes();
/**
* Returns the (possibly empty) prefix used when dialling national numbers (e.g. "0" for "US").
* Not all regions require a prefix for national dialling.
*/
public DigitSequence getPreferredNationalPrefix() {
ImmutableSet<DigitSequence> prefixes = getNationalPrefixes();
return prefixes.isEmpty() ? DigitSequence.empty() : prefixes.iterator().next();
}
/**
* Returns all carrier prefixes for national dialling. This range must not contain the national
* prefix.
*/
public abstract RangeTree getCarrierPrefixes();
/**
* Returns the (possible empty) default international dialling (IDD) prefix, possibly
* containing a '~' to indicate a pause during dialling (e.g. "8~10" for Russia).
*/
public abstract String getDefaultIddPrefix();
/**
* Returns all IDD prefixes which may be used for international dialling. If the default prefix
* is not empty it must be contained in this range.
*/
public abstract RangeTree getIddPrefixes();
/** Returns the preferred label to use for indicating extensions for numbers. */
public abstract Optional<String> getExtensionPrefix();
/** Returns the regions in which mobile numbers are portable between carriers. */
public abstract ImmutableSet<PhoneRegion> getMobilePortableRegions();
}
/**
* Creates a numbering scheme from a range table and example numbers. No rules are applied to the
* data in the tables, and they are assumed to be complete.
*/
public static NumberingScheme from(
Attributes attributes,
RangeTable xmlTable,
Map<PhoneRegion, RangeTable> shortcodeMap,
Map<String, FormatSpec> formats,
ImmutableList<AltFormatSpec> altFormats,
Table<PhoneRegion, ValidNumberType, DigitSequence> exampleNumbers,
List<Comment> comments) {
checkPossibleRegions(attributes.getRegions(), xmlTable);
checkNationalOnly(attributes, xmlTable);
checkUnambiguousIdd(attributes, xmlTable, formats);
ImmutableSortedMap<PhoneRegion, RangeTable> shortcodes =
checkShortCodeConsistency(shortcodeMap, xmlTable);
return new AutoValue_NumberingScheme(
attributes,
xmlTable,
shortcodes,
checkFormatConsistency(attributes, formats, xmlTable, shortcodes),
checkAltFormatConsistency(altFormats, formats, xmlTable),
checkExampleNumbers(attributes.getRegions(), xmlTable, exampleNumbers),
addSyntheticComments(comments, attributes));
}
// Adds the first comments for main and auxiliary regions, giving the English name and detailing
// auxiliary region information if necessary.
private static ImmutableList<Comment> addSyntheticComments(
List<Comment> comments, Attributes attributes) {
PhoneRegion mainRegion = attributes.getMainRegion();
if (!mainRegion.equals(PhoneRegion.getWorld())) {
List<Comment> modified = new ArrayList<>(getRegionNameComments(mainRegion));
List<PhoneRegion> auxRegions =
attributes.getRegions().asList().subList(1, attributes.getRegions().size());
if (!auxRegions.isEmpty()) {
String comment = String.format("Main region for '%s'", Joiner.on(',').join(auxRegions));
modified.add(Comment.create(Comment.anchor(mainRegion), ImmutableList.of(comment)));
for (PhoneRegion r : auxRegions) {
modified.addAll(getRegionNameComments(r));
String auxComment =
String.format("Calling code and formatting shared with '%s'", mainRegion);
modified.add(Comment.create(Comment.anchor(r), ImmutableList.of(auxComment)));
}
}
// Do this last, since order matters (because anchors are not unique) and we want the
// synthetic comments to come first.
modified.addAll(comments);
comments = modified;
}
return ImmutableList.copyOf(comments);
}
private static List<Comment> getRegionNameComments(PhoneRegion region) {
ImmutableList<String> enName = ImmutableList.of(region.getEnglishNameForXmlComments());
return ImmutableList.of(
Comment.create(Comment.anchor(region), enName),
Comment.create(Comment.shortcodeAnchor(region), enName));
}
private static void checkPossibleRegions(Set<PhoneRegion> regions, RangeTable xmlTable) {
ImmutableSet<PhoneRegion> actual = REGIONS.extractGroupColumns(xmlTable.getColumns()).keySet();
// Allow no region column in the table if there's only one region (since it's implicit).
checkState((actual.isEmpty() && regions.size() == 1) || actual.equals(regions),
"regions added to range table do not match the expected numbering scheme regions\n"
+ "expected: %s\n"
+ "actual: %s\n",
regions, actual);
}
  // An assumption has generally been that if a range is "national only" then it either:
  // a) belongs to only one region (the one it's national only for)
  // b) belongs to at least the main region (since in some schemes ranges mostly just overlap all
  //    possible regions).
  // Thus we preclude the possibility of having a "national only" number that appears in multiple
  // regions, but not the main region.
  //
  // If this check is ever removed (because there is real data where this is not the case), then
  // the code which generates the "<noInternationalDialling>" patterns will have to be revisited.
  private static void checkNationalOnly(Attributes attributes, RangeTable xmlTable) {
    RangeTree allNationalOnly = xmlTable.getRanges(NATIONAL_ONLY, true);
    if (allNationalOnly.isEmpty()) {
      return;
    }
    // The main region is always first in the region list.
    ImmutableList<PhoneRegion> regions = attributes.getRegions().asList();
    PhoneRegion main = regions.get(0);
    // Anything assigned to the main region can be ignored as we allow it to have multiple regions.
    // Now we have to ensure that these ranges are assigned to exactly one auxiliary region.
    RangeTree remaining =
        allNationalOnly.subtract(xmlTable.getRanges(REGIONS.getColumn(main), true));
    if (remaining.isEmpty()) {
      return;
    }
    DigitSequence cc = attributes.getCallingCode();
    for (PhoneRegion r : regions.subList(1, regions.size())) {
      RangeTree auxNationalOnly =
          xmlTable.getRanges(REGIONS.getColumn(r), true).intersect(allNationalOnly);
      // Anything already removed from "remaining" was already accounted for by another region.
      checkMetadata(remaining.containsAll(auxNationalOnly),
          "[%s] %s has national-only ranges which overlap other regions: %s",
          cc, r, auxNationalOnly.subtract(remaining));
      remaining = remaining.subtract(auxNationalOnly);
    }
    // This is not a data issue (that should have been checked already); leftovers here are a bug.
    checkState(remaining.isEmpty(), "[%s] ranges not assigned to any region: %s", cc, remaining);
  }
  /**
   * Ensures no national range can start with an IDD (international dialling code of any kind).
   * This is slightly more complex than just looking for any IDD prefix at the start of a range
   * because of cases like India, where "00800..." is a valid range and does start with IDD.
   *
   * <p>We allow this because:
   * <ol>
   * <li>The number is required to have the national prefix in front, so must be dialled as
   * {@code 000800...} (according to the Indian numbering plan)
   * <li>and {@code 000...} is not a valid sequence that would lead to dialing into another region,
   * because all calling codes start with {@code [1-9]}.
   * </ol>
   */
  private static void checkUnambiguousIdd(
      Attributes attributes, RangeTable xmlTable, Map<String, FormatSpec> formats) {
    // It can be empty for non-geographic (world) numbering schemes.
    if (attributes.getIddPrefixes().isEmpty()) {
      return;
    }
    // All IDDs extended by one non-zero digit. These are the prefixes which if dialled may end
    // up in another region, so they cannot be allowed at the start of any national number.
    RangeTree iddPlusOneDigit = attributes.getIddPrefixes().map(r -> r.extendByMask(NOT_ZERO_MASK));
    // We only care about ranges up to this length, which can speed things up.
    int maxPrefixLength = iddPlusOneDigit.getLengths().last();
    // Now prefix any ranges which could be dialled with a national prefix with all possible
    // national prefixes, based on how they are formatted (and assume that no format means no
    // national prefix).
    RangeTree withNationalPrefix = RangeTree.empty();
    // Ranges with no assigned format (the FORMAT default) are treated as prefix-free.
    RangeTree withoutNationalPrefix = xmlTable.getRanges(FORMAT, FORMAT.defaultValue());
    for (String fid : formats.keySet()) {
      FormatSpec spec = formats.get(fid);
      // Only bother with ranges up to the maximum prefix length we care about.
      RangeTree r = xmlTable.getRanges(FORMAT, fid).slice(0, maxPrefixLength);
      if (spec.nationalPrefixOptional()) {
        // An optional prefix means the range may be dialled either way, so add it to both sets.
        withNationalPrefix = withNationalPrefix.union(r);
        withoutNationalPrefix = withoutNationalPrefix.union(r);
      } else if (spec.national().hasNationalPrefix()) {
        withNationalPrefix = withNationalPrefix.union(r);
      } else {
        withoutNationalPrefix = withoutNationalPrefix.union(r);
      }
    }
    // Only here due to lambdas requiring an effectively final field (this makes me sad).
    RangeTree withNationalPrefixCopy = withNationalPrefix;
    RangeTree allDiallablePrefixes =
        withoutNationalPrefix
            .union(attributes.getNationalPrefixes().stream()
                .map(RangeSpecification::from)
                .map(p -> withNationalPrefixCopy.prefixWith(p))
                .reduce(RangeTree.empty(), RangeTree::union));
    // These are prefixes which are claimed to be nationally diallable but overlap with the IDD.
    RangeTree iddOverlap = PrefixTree.from(iddPlusOneDigit).retainFrom(allDiallablePrefixes);
    checkMetadata(iddOverlap.isEmpty(),
        "[%s] ranges cannot start with IDD: %s", attributes.getCallingCode(), iddOverlap);
  }
/**
* Ensures the shortcodes are disjoint from main ranges and consistent with each other by format
* (since format information isn't held separately for each shortcode table).
*/
private static ImmutableSortedMap<PhoneRegion, RangeTable> checkShortCodeConsistency(
Map<PhoneRegion, RangeTable> shortcodeMap, RangeTable table) {
ImmutableSortedMap<PhoneRegion, RangeTable> shortcodes =
ImmutableSortedMap.copyOf(shortcodeMap);
shortcodes.forEach((region, shortcodeTable) -> {
RangeTree overlap = table.getAllRanges().intersect(shortcodeTable.getAllRanges());
checkMetadata(overlap.isEmpty(),
"Shortcode and national numbers overlap for %s: %s", region, overlap);
});
return shortcodes;
}
  // Schema used only to merge the format-related columns of the main and shortcode tables so the
  // combined assignment can be validated in one place.
  private static final Schema FORMAT_SCHEMA =
      Schema.builder().add(AREA_CODE_LENGTH).add(FORMAT).build();
  // We actually explicitly permit duplicate formats (for now) since the XML has them. Later, once
  // everything is settled, it might be possible to add a check here.
  private static ImmutableMap<String, FormatSpec> checkFormatConsistency(
      Attributes attributes,
      Map<String, FormatSpec> formatMap,
      RangeTable table,
      Map<PhoneRegion, RangeTable> shortcodes) {
    DigitSequence cc = attributes.getCallingCode();
    RangeTable.Builder allFormats = RangeTable.builder(FORMAT_SCHEMA);
    allFormats.copyNonDefaultValues(AREA_CODE_LENGTH, table, OverwriteMode.ALWAYS);
    allFormats.copyNonDefaultValues(FORMAT, table, OverwriteMode.ALWAYS);
    // Throws a RangeException (IllegalArgumentException) if inconsistent write occurs.
    shortcodes.values()
        .forEach(t -> allFormats.copyNonDefaultValues(FORMAT, t, OverwriteMode.SAME))
        ;
    RangeTable formatTable = allFormats.build();
    ImmutableMap<String, FormatSpec> formats = ImmutableMap.copyOf(formatMap);
    // TODO: Make this "equals" eventually (since it currently sees "synthetic" IDs).
    checkMetadata(
        formats.keySet().containsAll(formatTable.getAssignedValues(FORMAT)),
        "[%s] mismatched format IDs: %s",
        cc, Sets.symmetricDifference(formatTable.getAssignedValues(FORMAT), formats.keySet()));
    // If any of the checks relating to carrier formats are relaxed here, it might be necessary to
    // re-evaluate the logic around regeneration of nationalPrefixForParsing (so be careful!).
    boolean carrierTemplatesExist = false;
    boolean nationalPrefixExistsForFormatting = false;
    boolean nationalPrefixSometimesOptional = false;
    for (String id : formats.keySet()) {
      FormatSpec spec = formats.get(id);
      RangeTree assigned = allFormats.getRanges(FORMAT, id);
      checkMetadata(!assigned.isEmpty(),
          "[%s] format specifier '%s' not assigned to any range: %s", cc, id, spec);
      checkFormatLengths(cc, spec, assigned);
      checkLocalFormatLengths(cc, formatTable, spec, assigned);
      // Accumulate facts about all formats for the scheme-wide checks below.
      carrierTemplatesExist |= spec.carrier().isPresent();
      nationalPrefixExistsForFormatting |=
          spec.national().hasNationalPrefix()
              || spec.carrier().map(FormatTemplate::hasNationalPrefix).orElse(false);
      nationalPrefixSometimesOptional |= spec.nationalPrefixOptional();
    }
    // Only if the present region is not JP do this check as in Japan we are not capturing domestic
    // carrier codes.
    if (!cc.toString().equals(JAPAN_COUNTRY_CODE)) {
      checkMetadata(
          attributes.getCarrierPrefixes().isEmpty() || carrierTemplatesExist,
          "[%s] carrier prefixes exist but no formats have carrier templates: %s",
          cc,
          formats.values());
    }
    checkMetadata(!attributes.getNationalPrefixes().isEmpty() || !nationalPrefixExistsForFormatting,
        "[%s] if no national prefix exists, it cannot be specified in any format template: %s",
        cc, formats.values());
    checkMetadata(!attributes.getNationalPrefixes().isEmpty() || !nationalPrefixSometimesOptional,
        "[%s] if no national prefix exists, it cannot be optional for formatting: %s",
        cc, formats.values());
    return formats;
  }
// Checks that the ranges to which formats are assigned don't have lengths outside the possible
// lengths of that format (e.g. we don't have "12xx" assigned to the format "XXX-XXX").
private static void checkFormatLengths(DigitSequence cc, FormatSpec spec, RangeTree assigned) {
TreeSet<Integer> unexpected = new TreeSet<>(assigned.getLengths());
unexpected.removeAll(ContiguousSet.closed(spec.minLength(), spec.maxLength()));
if (!unexpected.isEmpty()) {
RangeTree bad = RangeTree.empty();
for (int n : unexpected) {
bad = bad.union(assigned.intersect(RangeTree.from(RangeSpecification.any(n))));
}
throw new IllegalArgumentException(String.format(
"[%s] format %s assigned to ranges of invalid length: %s", cc, spec, bad));
}
}
  // Checks that the local lengths for ranges (as determined by area code length) is compatible
  // with the assigned local format specifier. Note that it is allowed to have an area code length
  // of zero and still be assigned a format with a local specifier (the specifier may be shared
  // with other ranges which do have an area code length).
  private static void checkLocalFormatLengths(
      DigitSequence cc, RangeTable formatTable, FormatSpec spec, RangeTree assigned) {
    // Nothing to check when the format has no "local" part.
    if (!spec.local().isPresent()) {
      return;
    }
    // All area code lengths actually assigned to the ranges using this format.
    ImmutableSet<Integer> lengths =
        formatTable.subTable(assigned, AREA_CODE_LENGTH).getAssignedValues(AREA_CODE_LENGTH);
    FormatTemplate local = spec.local().get();
    // Format specifiers either vary length in the area code or the local number, but not both.
    int localLength = local.minLength();
    int localVariance = local.maxLength() - local.minLength();
    if (localVariance == 0) {
      // If there's no length variation in the "local" part, it means the area code length can
      // be variable.
      ContiguousSet<Integer> acls =
          ContiguousSet.closed(spec.minLength() - localLength, spec.maxLength() - localLength);
      checkMetadata(acls.containsAll(lengths),
          "[%s] area code lengths '%s' not supported by format: %s", cc, acls, spec);
    } else {
      // If the length variation of the format is in the trailing "local" part, we expect a
      // unique area code length (only one "group" in the format can be variable).
      checkMetadata((spec.maxLength() - spec.minLength()) == localVariance,
          "[%s] invalid local format (bad length) in format specifier %s", cc, spec);
      int acl = spec.minLength() - localLength;
      checkMetadata(lengths.size() == 1 && lengths.contains(acl),
          "[%s] implied area code length(s) %s does not match expected length (%s) of format: %s",
          cc, lengths, acl, spec);
    }
  }
  // Validates each alternate format against its parent: the parent format must exist, the
  // alternate's lengths must be bounded by the parent's, the prefix must capture at least one
  // parent range, and the prefix must be the tightest specification which does so.
  private static ImmutableList<AltFormatSpec> checkAltFormatConsistency(
      ImmutableList<AltFormatSpec> altFormats,
      Map<String, FormatSpec> formats,
      RangeTable xmlTable) {
    for (AltFormatSpec altFormat : altFormats) {
      String parentId = altFormat.parentFormatId();
      FormatSpec parent = formats.get(parentId);
      checkMetadata(parent != null, "unknown parent format ID in alternate format: %s", altFormat);
      Set<Integer> altLengths = getLengths(altFormat.template());
      checkMetadata(getLengths(parent.national()).containsAll(altLengths),
          "alternate format lengths must be bounded by parent format lengths: %s", altFormat);
      // Only care about the parent ranges which have the same length(s) as the alt format.
      RangeTree lengthMask = RangeTree.from(altLengths.stream().map(RangeSpecification::any));
      RangeTree ranges = xmlTable.getRanges(FORMAT, parentId).intersect(lengthMask);
      RangeTree captured = PrefixTree.from(altFormat.prefix()).retainFrom(ranges);
      checkMetadata(!captured.isEmpty(),
          "alternate format must capture some of the parent format ranges: %s", altFormat);
      int prefixLength = altFormat.prefix().length();
      if (prefixLength > 0) {
        // A really ugly, but useful check to find if there's a better prefix. Specifically, it
        // determines if the given prefix is "over-capturing" ranges (e.g. prefix is "1[2-8]" but
        // only "1[3-6]" exists in the parent format's assigned ranges). Since this is an odd, non
        // set-like operation, it's just done "manually" using bit masks. It's not a union of the
        // paths, it's a "squashing" (since it results in the smallest single range specification).
        //
        // Start with all the paths trimmed to the prefix length (e.g. "123", "145", "247"). All
        // range specifications in the slice are the same length as the prefix we started with.
        RangeTree slice = captured.slice(prefixLength);
        // Now union the digit masks at each depth for all paths in the slice (in theory there
        // could be a "squash" operation on RangeSpecification to do all this).
        int[] masks = new int[prefixLength];
        slice.asRangeSpecifications().forEach(s -> {
          for (int n = 0; n < prefixLength; n++) {
            masks[n] |= s.getBitmask(n);
          }
        });
        // Now reconstruct the single "squashed" range specification (e.g. "[12][24][357]").
        RangeSpecification minSpec = RangeSpecification.empty();
        for (int n = 0; n < prefixLength; n++) {
          minSpec = minSpec.extendByMask(masks[n]);
        }
        // If the declared prefix is not the minimal one, the data should be tightened.
        checkMetadata(minSpec.equals(altFormat.prefix()),
            "alternate format prefix '%s' is too broad, it should be '%s' for: %s",
            altFormat.prefix(), minSpec, altFormat);
      }
    }
    return altFormats;
  }
private static Set<Integer> getLengths(FormatTemplate t) {
return ContiguousSet.closed(t.minLength(), t.maxLength());
}
  // Checks that example numbers are valid numbers in the ranges for their type.
  private static ImmutableTable<PhoneRegion, ValidNumberType, DigitSequence> checkExampleNumbers(
      Set<PhoneRegion> regions,
      RangeTable table,
      Table<PhoneRegion, ValidNumberType, DigitSequence> exampleNumbers) {
    for (PhoneRegion r : regions) {
      // Restrict to this region's ranges, keeping only the TYPE column.
      RangeTable regionTable =
          table.subTable(table.getRanges(REGIONS.getColumn(r), TRUE), XmlRangesSchema.TYPE);
      Map<ValidNumberType, DigitSequence> regionExamples = exampleNumbers.row(r);
      ImmutableSet<ValidNumberType> types = regionTable.getAssignedValues(XmlRangesSchema.TYPE);
      // The set of assigned types and the set of example-number types must match exactly.
      checkMetadata(types.equals(regionExamples.keySet()),
          "mismatched types for example numbers in region %s\nExpected: %s\nActual: %s",
          r, types, regionExamples);
      for (ValidNumberType t : types) {
        DigitSequence exampleNumber = regionExamples.get(t);
        RangeTree ranges = regionTable.getRanges(XmlRangesSchema.TYPE, t);
        // Special case, since we permit example numbers for fixed line/mobile to be valid for the
        // combined range as well.
        //
        // This logic smells, since it reveals information about the XML structure (in which fixed
        // line and mobile ranges can overlap). However if we insist that a fixed line examples are
        // in the "fixed line only" range, we end up with problems if (mobile == fixed line), since
        // there is no "fixed line only" range (but there is an example number in the XML).
        if (t == ValidNumberType.MOBILE || t == ValidNumberType.FIXED_LINE) {
          ranges = ranges.union(
              regionTable.getRanges(XmlRangesSchema.TYPE, ValidNumberType.FIXED_LINE_OR_MOBILE));
        }
        checkMetadata(ranges.contains(exampleNumber),
            "invalid example number '%s' of type %s in region %s", exampleNumber, t, r);
      }
    }
    return ImmutableTable.copyOf(exampleNumbers);
  }
  /** Returns the scheme-wide attributes (calling code, regions, prefixes etc.). */
  public abstract Attributes getAttributes();
  // TODO: Inline the wrapper methods below.
  /** Returns the unique calling code of this numbering scheme (delegates to the attributes). */
  public DigitSequence getCallingCode() {
    return getAttributes().getCallingCode();
  }
  /**
   * Returns the regions represented by this numbering scheme. The main region is always present
   * and listed first, and remaining regions are listed in "natural" order.
   */
  public ImmutableSet<PhoneRegion> getRegions() {
    return getAttributes().getRegions();
  }
  /**
   * Returns a range table containing per-range attributes according to
   * {@link XmlRangesSchema#COLUMNS}.
   */
  public abstract RangeTable getTable();
/**
* Returns a RangeTable restricted to the given region, which conforms to the
* {@link XmlRangesSchema} schema, with the exception that no region columns exist.
*/
public final RangeTable getTableFor(PhoneRegion region) {
checkArgument(getRegions().contains(region),
"invalid region '%s' for calling code '%s'", region, getCallingCode());
return getTable()
.subTable(getTable().getRanges(REGIONS.getColumn(region), TRUE), PER_REGION_COLUMNS);
}
public abstract ImmutableSortedMap<PhoneRegion, RangeTable> getShortcodes();
/** Returns the RangeTable for the shortcodes of the given region. */
public final Optional<RangeTable> getShortcodesFor(PhoneRegion region) {
checkArgument(getRegions().contains(region),
"invalid region '%s' for calling code '%s'", region, getCallingCode());
return Optional.ofNullable(getShortcodes().get(region));
}
  /** Returns the map of format ID to format specifier. */
  public abstract ImmutableMap<String, FormatSpec> getFormats();
  /** Returns a list of alternate formats which are also expected for this numbering scheme. */
  public abstract ImmutableList<AltFormatSpec> getAlternateFormats();
  /** Returns a table of example numbers for each region code and number type. */
  public abstract ImmutableTable<PhoneRegion, ValidNumberType, DigitSequence> getExampleNumbers();
  /**
   * Returns all comments known about by this numbering scheme. Internal method, callers should
   * always use {@link #getComments(Anchor)} instead (which also validates the anchor's region).
   */
  abstract ImmutableList<Comment> getAllComments();
/** Returns comments with a specified anchor for this numbering scheme. */
public ImmutableList<Comment> getComments(Anchor anchor) {
checkArgument(getAttributes().getRegions().contains(anchor.region()),
"invalid region: %s", anchor.region());
return getAllComments().stream()
.filter(c -> c.getAnchor().equals(anchor))
.collect(toImmutableList());
}
  /**
   * An encapsulation of a comment to be associated with an element in the XML. Rather than have
   * many APIs for setting/getting comments on a {@link NumberingScheme}, the approach taken here
   * is to let comments describe for themselves where they go but keep them in one big bucket.
   * <p>
   * This simplifies a lot of the intermediate APIs in the builders, but is less efficient (since
   * finding comments is now a linear search). If this is ever an issue, they should be mapped by
   * key, using a {@code ListMultimap<String, Comment>} (since comments are also ordered by their
   * number).
   */
  @AutoValue
  public abstract static class Comment {
    private static final Joiner JOIN_LINES = Joiner.on('\n');
    private static final Splitter SPLIT_LINES = Splitter.on('\n');
    /** An anchor defining which element, in which territory, a comment should be attached to. */
    @AutoValue
    public abstract static class Anchor implements Comparable<Anchor> {
      // Special anchor for comments that are not stored in the comment table, but are attached to
      // data directly (e.g. formats).
      private static final Anchor ANONYMOUS = of(PhoneRegion.getUnknown(), "");
      // Anchors order by region first, then by label.
      private static final Comparator<Anchor> ORDERING =
          comparing(Anchor::region).thenComparing(Anchor::label);
      /** Creates a comment anchor from a region and xml type. */
      static Anchor of(PhoneRegion region, String label) {
        // TODO: Add check for valid label.
        return anchor(region, label);
      }
      /** The region of the territory this comment should be attached to. */
      public abstract PhoneRegion region();
      /**
       * The type in the territory this comment should be attached to. If missing, attach this
       * comment to the main comment block for the territory.
       */
      public abstract String label();
      @Override
      public int compareTo(Anchor that) {
        return ORDERING.compare(this, that);
      }
    }
    // Private since we want to funnel people through type safe factory methods.
    private static Anchor anchor(PhoneRegion region, String label) {
      return new AutoValue_NumberingScheme_Comment_Anchor(region, label);
    }
    /** Returns a key identifying a comment for a region. */
    public static Anchor anchor(PhoneRegion region) {
      return anchor(region, "XML");
    }
    /** Returns a key identifying a comment for the validation range of a given type in a region. */
    public static Anchor anchor(PhoneRegion region, XmlNumberType xmlType) {
      return anchor(region, xmlType.toString());
    }
    /** Returns a key identifying the main shortcode comment block for a region. */
    public static Anchor shortcodeAnchor(PhoneRegion region) {
      return anchor(region, "SC");
    }
    /**
     * Returns a key identifying a comment for the validation range of a given shortcode type in
     * a region.
     */
    public static Anchor shortcodeAnchor(PhoneRegion region, XmlShortcodeType xmlType) {
      return anchor(region, xmlType.toString());
    }
    /** Creates a comment that applies to data identified by the specified key. */
    public static Comment create(Anchor anchor, List<String> lines) {
      return new AutoValue_NumberingScheme_Comment(anchor, ImmutableList.copyOf(lines));
    }
    /** Creates an anonymous comment (one attached directly to data rather than an anchor). */
    public static Comment createAnonymous(List<String> lines) {
      return new AutoValue_NumberingScheme_Comment(Anchor.ANONYMOUS, ImmutableList.copyOf(lines));
    }
    /** Creates a comment from newline-separated text, attached to the given anchor. */
    public static Comment fromText(Anchor anchor, String text) {
      return create(anchor, SPLIT_LINES.splitToList(text));
    }
    /** Creates an anonymous comment from newline-separated text. */
    public static Comment fromText(String text) {
      return createAnonymous(SPLIT_LINES.splitToList(text));
    }
    /**
     * Returns the key which defines what this comment relates to (and thus where it should appear
     * in the XML file).
     */
    public abstract Anchor getAnchor();
    /** The lines of a single multi-line comment. */
    // TODO: Switch to a single string (with newlines) which is what's done elsewhere.
    public abstract ImmutableList<String> getLines();
    /** Returns the comment joined back into a single newline-separated string. */
    public String toText() {
      return JOIN_LINES.join(getLines());
    }
    // Visible for AutoValue.
    Comment() {}
  }
  // Package-private constructor, visible only so the generated AutoValue subclass can extend it.
  NumberingScheme() {}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1p2beta1/video_intelligence.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.videointelligence.v1p2beta1;
/**
*
*
* <pre>
* Annotations related to one detected OCR text snippet. This will contain the
* corresponding text, confidence value, and frame level information for each
* detection.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p2beta1.TextAnnotation}
*/
public final class TextAnnotation extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p2beta1.TextAnnotation)
TextAnnotationOrBuilder {
private static final long serialVersionUID = 0L;
  // Use TextAnnotation.newBuilder() to construct.
  // NOTE(review): this class is generated by protoc (see file header); fix issues in the .proto
  // and regenerate rather than hand-editing.
  private TextAnnotation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default-instance constructor: initializes fields to their proto3 defaults.
  private TextAnnotation() {
    text_ = "";
    segments_ = java.util.Collections.emptyList();
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TextAnnotation();
  }
  // Descriptor and field-accessor plumbing, resolved against the generated service proto class.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
        .internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.videointelligence.v1p2beta1.TextAnnotation.class,
            com.google.cloud.videointelligence.v1p2beta1.TextAnnotation.Builder.class);
  }
  public static final int TEXT_FIELD_NUMBER = 1;
  // Holds either a String or a ByteString; decoded lazily on first access (see getText()).
  @SuppressWarnings("serial")
  private volatile java.lang.Object text_ = "";
  /**
   *
   *
   * <pre>
   * The detected text.
   * </pre>
   *
   * <code>string text = 1;</code>
   *
   * @return The text.
   */
  @java.lang.Override
  public java.lang.String getText() {
    java.lang.Object ref = text_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the ByteString and cache the immutable result back into the field.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      text_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * The detected text.
   * </pre>
   *
   * <code>string text = 1;</code>
   *
   * @return The bytes for text.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getTextBytes() {
    java.lang.Object ref = text_;
    if (ref instanceof java.lang.String) {
      // Mirror of getText(): encode once and cache the ByteString form.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      text_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int SEGMENTS_FIELD_NUMBER = 2;
  // Never null: defaults to Collections.emptyList() (set in the no-arg constructor).
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.videointelligence.v1p2beta1.TextSegment> segments_;
  /**
   *
   *
   * <pre>
   * All video segments where OCR detected text appears.
   * </pre>
   *
   * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.videointelligence.v1p2beta1.TextSegment>
      getSegmentsList() {
    return segments_;
  }
  /**
   *
   *
   * <pre>
   * All video segments where OCR detected text appears.
   * </pre>
   *
   * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.videointelligence.v1p2beta1.TextSegmentOrBuilder>
      getSegmentsOrBuilderList() {
    return segments_;
  }
  /**
   *
   *
   * <pre>
   * All video segments where OCR detected text appears.
   * </pre>
   *
   * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
   */
  @java.lang.Override
  public int getSegmentsCount() {
    return segments_.size();
  }
  /**
   *
   *
   * <pre>
   * All video segments where OCR detected text appears.
   * </pre>
   *
   * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p2beta1.TextSegment getSegments(int index) {
    return segments_.get(index);
  }
  /**
   *
   *
   * <pre>
   * All video segments where OCR detected text appears.
   * </pre>
   *
   * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p2beta1.TextSegmentOrBuilder getSegmentsOrBuilder(
      int index) {
    return segments_.get(index);
  }
  // Memoized initialization state: -1 = not computed, 0 = not initialized, 1 = initialized.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    // No required fields in proto3, so this message is always initialized.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Default-valued (empty) string is skipped on the wire, per proto3 encoding rules.
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(text_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, text_);
    }
    for (int i = 0; i < segments_.size(); i++) {
      output.writeMessage(2, segments_.get(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // memoizedSize == -1 means "not computed yet"; the size is cached after first computation.
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(text_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, text_);
    }
    for (int i = 0; i < segments_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, segments_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.videointelligence.v1p2beta1.TextAnnotation)) {
      return super.equals(obj);
    }
    com.google.cloud.videointelligence.v1p2beta1.TextAnnotation other =
        (com.google.cloud.videointelligence.v1p2beta1.TextAnnotation) obj;
    // Field-by-field comparison, including any unknown fields carried through parsing.
    if (!getText().equals(other.getText())) return false;
    if (!getSegmentsList().equals(other.getSegmentsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    // 0 is the "not computed" sentinel; a computed hash of 0 would be recomputed each call.
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + TEXT_FIELD_NUMBER;
    hash = (53 * hash) + getText().hashCode();
    if (getSegmentsCount() > 0) {
      hash = (37 * hash) + SEGMENTS_FIELD_NUMBER;
      hash = (53 * hash) + getSegmentsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard generated parse entry points. The byte-oriented overloads
  // delegate directly to PARSER; the stream-based overloads go through the
  // GeneratedMessageV3 helpers, which translate protobuf parse failures into
  // InvalidProtocolBufferException while propagating plain IOExceptions.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message payload,
  // allowing multiple messages to be read back-to-back from one stream.
  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Creates a fresh Builder seeded from the shared default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Creates a Builder pre-populated with a copy of {@code prototype}'s fields.
  public static Builder newBuilder(
      com.google.cloud.videointelligence.v1p2beta1.TextAnnotation prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  // For the default instance an empty Builder suffices (nothing to copy);
  // otherwise merge this message's fields into a new Builder.
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  // Runtime hook: builds a child Builder attached to {@code parent} so nested
  // builder changes propagate invalidation upward.
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Annotations related to one detected OCR text snippet. This will contain the
   * corresponding text, confidence value, and frame level information for each
   * detection.
   * </pre>
   *
   * Protobuf type {@code google.cloud.videointelligence.v1p2beta1.TextAnnotation}
   */
  // Generated mutable companion of TextAnnotation. Field presence is tracked in
  // bitField0_: bit 0x1 = text_, bit 0x2 = segments_ list is locally mutable.
  // The repeated "segments" field switches between a plain List (segments_) and
  // a RepeatedFieldBuilderV3 (segmentsBuilder_) once nested builders are used.
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p2beta1.TextAnnotation)
      com.google.cloud.videointelligence.v1p2beta1.TextAnnotationOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_descriptor;
    }

    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.videointelligence.v1p2beta1.TextAnnotation.class,
              com.google.cloud.videointelligence.v1p2beta1.TextAnnotation.Builder.class);
    }

    // Construct using com.google.cloud.videointelligence.v1p2beta1.TextAnnotation.newBuilder()
    private Builder() {}

    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }

    // Resets every field to its proto3 default and clears all presence bits.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      text_ = "";
      if (segmentsBuilder_ == null) {
        segments_ = java.util.Collections.emptyList();
      } else {
        segments_ = null;
        segmentsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000002);
      return this;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.videointelligence.v1p2beta1.VideoIntelligenceServiceProto
          .internal_static_google_cloud_videointelligence_v1p2beta1_TextAnnotation_descriptor;
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1p2beta1.TextAnnotation getDefaultInstanceForType() {
      return com.google.cloud.videointelligence.v1p2beta1.TextAnnotation.getDefaultInstance();
    }

    // Builds the message, rejecting uninitialized results (cannot happen for
    // proto3, but kept for API parity with proto2).
    @java.lang.Override
    public com.google.cloud.videointelligence.v1p2beta1.TextAnnotation build() {
      com.google.cloud.videointelligence.v1p2beta1.TextAnnotation result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }

    @java.lang.Override
    public com.google.cloud.videointelligence.v1p2beta1.TextAnnotation buildPartial() {
      com.google.cloud.videointelligence.v1p2beta1.TextAnnotation result =
          new com.google.cloud.videointelligence.v1p2beta1.TextAnnotation(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }

    // Transfers ownership of the segments list to the built message: the local
    // list is frozen (made unmodifiable, mutable bit cleared) so later builder
    // mutations copy-on-write instead of aliasing the built message.
    private void buildPartialRepeatedFields(
        com.google.cloud.videointelligence.v1p2beta1.TextAnnotation result) {
      if (segmentsBuilder_ == null) {
        if (((bitField0_ & 0x00000002) != 0)) {
          segments_ = java.util.Collections.unmodifiableList(segments_);
          bitField0_ = (bitField0_ & ~0x00000002);
        }
        result.segments_ = segments_;
      } else {
        result.segments_ = segmentsBuilder_.build();
      }
    }

    // Copies singular fields whose presence bits are set into the result.
    private void buildPartial0(com.google.cloud.videointelligence.v1p2beta1.TextAnnotation result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000001) != 0)) {
        result.text_ = text_;
      }
    }

    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }

    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }

    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }

    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }

    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }

    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }

    // Dynamic-dispatch merge: routes typed messages to the typed overload.
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.videointelligence.v1p2beta1.TextAnnotation) {
        return mergeFrom((com.google.cloud.videointelligence.v1p2beta1.TextAnnotation) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }

    // Merges {@code other} into this builder: non-default text overwrites,
    // segments are concatenated. When the other list can be adopted wholesale
    // (ours is empty) it is shared rather than copied; with an active field
    // builder, an empty builder is disposed so the incoming list can be
    // adopted the same way (then re-wrapped if alwaysUseFieldBuilders).
    public Builder mergeFrom(com.google.cloud.videointelligence.v1p2beta1.TextAnnotation other) {
      if (other == com.google.cloud.videointelligence.v1p2beta1.TextAnnotation.getDefaultInstance())
        return this;
      if (!other.getText().isEmpty()) {
        text_ = other.text_;
        bitField0_ |= 0x00000001;
        onChanged();
      }
      if (segmentsBuilder_ == null) {
        if (!other.segments_.isEmpty()) {
          if (segments_.isEmpty()) {
            segments_ = other.segments_;
            bitField0_ = (bitField0_ & ~0x00000002);
          } else {
            ensureSegmentsIsMutable();
            segments_.addAll(other.segments_);
          }
          onChanged();
        }
      } else {
        if (!other.segments_.isEmpty()) {
          if (segmentsBuilder_.isEmpty()) {
            segmentsBuilder_.dispose();
            segmentsBuilder_ = null;
            segments_ = other.segments_;
            bitField0_ = (bitField0_ & ~0x00000002);
            segmentsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getSegmentsFieldBuilder()
                    : null;
          } else {
            segmentsBuilder_.addAllMessages(other.segments_);
          }
        }
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }

    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }

    // Wire-format parse loop: consumes tags until end-of-stream (tag 0) or an
    // end-group tag; field 1 (tag 10) = text, field 2 (tag 18) = one segment;
    // anything else is preserved as an unknown field. onChanged() always fires
    // so partial progress is visible even when parsing throws.
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                text_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000001;
                break;
              } // case 10
            case 18:
              {
                com.google.cloud.videointelligence.v1p2beta1.TextSegment m =
                    input.readMessage(
                        com.google.cloud.videointelligence.v1p2beta1.TextSegment.parser(),
                        extensionRegistry);
                if (segmentsBuilder_ == null) {
                  ensureSegmentsIsMutable();
                  segments_.add(m);
                } else {
                  segmentsBuilder_.addMessage(m);
                }
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }

    private int bitField0_;

    // Holds either a String or a lazily-decoded ByteString (see getText()).
    private java.lang.Object text_ = "";

    /**
     *
     *
     * <pre>
     * The detected text.
     * </pre>
     *
     * <code>string text = 1;</code>
     *
     * @return The text.
     */
    public java.lang.String getText() {
      java.lang.Object ref = text_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        text_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The detected text.
     * </pre>
     *
     * <code>string text = 1;</code>
     *
     * @return The bytes for text.
     */
    public com.google.protobuf.ByteString getTextBytes() {
      java.lang.Object ref = text_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        text_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }

    /**
     *
     *
     * <pre>
     * The detected text.
     * </pre>
     *
     * <code>string text = 1;</code>
     *
     * @param value The text to set.
     * @return This builder for chaining.
     */
    public Builder setText(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      text_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The detected text.
     * </pre>
     *
     * <code>string text = 1;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearText() {
      text_ = getDefaultInstance().getText();
      bitField0_ = (bitField0_ & ~0x00000001);
      onChanged();
      return this;
    }

    /**
     *
     *
     * <pre>
     * The detected text.
     * </pre>
     *
     * <code>string text = 1;</code>
     *
     * @param value The bytes for text to set.
     * @return This builder for chaining.
     */
    public Builder setTextBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      text_ = value;
      bitField0_ |= 0x00000001;
      onChanged();
      return this;
    }

    private java.util.List<com.google.cloud.videointelligence.v1p2beta1.TextSegment> segments_ =
        java.util.Collections.emptyList();

    // Copy-on-write guard: promotes segments_ to a private ArrayList (and sets
    // the mutable bit) before the first in-place mutation.
    private void ensureSegmentsIsMutable() {
      if (!((bitField0_ & 0x00000002) != 0)) {
        segments_ =
            new java.util.ArrayList<com.google.cloud.videointelligence.v1p2beta1.TextSegment>(
                segments_);
        bitField0_ |= 0x00000002;
      }
    }

    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.videointelligence.v1p2beta1.TextSegment,
            com.google.cloud.videointelligence.v1p2beta1.TextSegment.Builder,
            com.google.cloud.videointelligence.v1p2beta1.TextSegmentOrBuilder>
        segmentsBuilder_;

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public java.util.List<com.google.cloud.videointelligence.v1p2beta1.TextSegment>
        getSegmentsList() {
      if (segmentsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(segments_);
      } else {
        return segmentsBuilder_.getMessageList();
      }
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public int getSegmentsCount() {
      if (segmentsBuilder_ == null) {
        return segments_.size();
      } else {
        return segmentsBuilder_.getCount();
      }
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public com.google.cloud.videointelligence.v1p2beta1.TextSegment getSegments(int index) {
      if (segmentsBuilder_ == null) {
        return segments_.get(index);
      } else {
        return segmentsBuilder_.getMessage(index);
      }
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public Builder setSegments(
        int index, com.google.cloud.videointelligence.v1p2beta1.TextSegment value) {
      if (segmentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSegmentsIsMutable();
        segments_.set(index, value);
        onChanged();
      } else {
        segmentsBuilder_.setMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public Builder setSegments(
        int index,
        com.google.cloud.videointelligence.v1p2beta1.TextSegment.Builder builderForValue) {
      if (segmentsBuilder_ == null) {
        ensureSegmentsIsMutable();
        segments_.set(index, builderForValue.build());
        onChanged();
      } else {
        segmentsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public Builder addSegments(com.google.cloud.videointelligence.v1p2beta1.TextSegment value) {
      if (segmentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSegmentsIsMutable();
        segments_.add(value);
        onChanged();
      } else {
        segmentsBuilder_.addMessage(value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public Builder addSegments(
        int index, com.google.cloud.videointelligence.v1p2beta1.TextSegment value) {
      if (segmentsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureSegmentsIsMutable();
        segments_.add(index, value);
        onChanged();
      } else {
        segmentsBuilder_.addMessage(index, value);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public Builder addSegments(
        com.google.cloud.videointelligence.v1p2beta1.TextSegment.Builder builderForValue) {
      if (segmentsBuilder_ == null) {
        ensureSegmentsIsMutable();
        segments_.add(builderForValue.build());
        onChanged();
      } else {
        segmentsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public Builder addSegments(
        int index,
        com.google.cloud.videointelligence.v1p2beta1.TextSegment.Builder builderForValue) {
      if (segmentsBuilder_ == null) {
        ensureSegmentsIsMutable();
        segments_.add(index, builderForValue.build());
        onChanged();
      } else {
        segmentsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public Builder addAllSegments(
        java.lang.Iterable<? extends com.google.cloud.videointelligence.v1p2beta1.TextSegment>
            values) {
      if (segmentsBuilder_ == null) {
        ensureSegmentsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, segments_);
        onChanged();
      } else {
        segmentsBuilder_.addAllMessages(values);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public Builder clearSegments() {
      if (segmentsBuilder_ == null) {
        segments_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000002);
        onChanged();
      } else {
        segmentsBuilder_.clear();
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public Builder removeSegments(int index) {
      if (segmentsBuilder_ == null) {
        ensureSegmentsIsMutable();
        segments_.remove(index);
        onChanged();
      } else {
        segmentsBuilder_.remove(index);
      }
      return this;
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public com.google.cloud.videointelligence.v1p2beta1.TextSegment.Builder getSegmentsBuilder(
        int index) {
      return getSegmentsFieldBuilder().getBuilder(index);
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public com.google.cloud.videointelligence.v1p2beta1.TextSegmentOrBuilder getSegmentsOrBuilder(
        int index) {
      if (segmentsBuilder_ == null) {
        return segments_.get(index);
      } else {
        return segmentsBuilder_.getMessageOrBuilder(index);
      }
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public java.util.List<
            ? extends com.google.cloud.videointelligence.v1p2beta1.TextSegmentOrBuilder>
        getSegmentsOrBuilderList() {
      if (segmentsBuilder_ != null) {
        return segmentsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(segments_);
      }
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public com.google.cloud.videointelligence.v1p2beta1.TextSegment.Builder addSegmentsBuilder() {
      return getSegmentsFieldBuilder()
          .addBuilder(
              com.google.cloud.videointelligence.v1p2beta1.TextSegment.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public com.google.cloud.videointelligence.v1p2beta1.TextSegment.Builder addSegmentsBuilder(
        int index) {
      return getSegmentsFieldBuilder()
          .addBuilder(
              index, com.google.cloud.videointelligence.v1p2beta1.TextSegment.getDefaultInstance());
    }

    /**
     *
     *
     * <pre>
     * All video segments where OCR detected text appears.
     * </pre>
     *
     * <code>repeated .google.cloud.videointelligence.v1p2beta1.TextSegment segments = 2;</code>
     */
    public java.util.List<com.google.cloud.videointelligence.v1p2beta1.TextSegment.Builder>
        getSegmentsBuilderList() {
      return getSegmentsFieldBuilder().getBuilderList();
    }

    // Lazily switches the segments field into builder mode: wraps the current
    // list in a RepeatedFieldBuilderV3 and nulls segments_, which becomes the
    // single source of truth from then on.
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.videointelligence.v1p2beta1.TextSegment,
            com.google.cloud.videointelligence.v1p2beta1.TextSegment.Builder,
            com.google.cloud.videointelligence.v1p2beta1.TextSegmentOrBuilder>
        getSegmentsFieldBuilder() {
      if (segmentsBuilder_ == null) {
        segmentsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.videointelligence.v1p2beta1.TextSegment,
                com.google.cloud.videointelligence.v1p2beta1.TextSegment.Builder,
                com.google.cloud.videointelligence.v1p2beta1.TextSegmentOrBuilder>(
                segments_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
        segments_ = null;
      }
      return segmentsBuilder_;
    }

    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }

    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }

    // @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p2beta1.TextAnnotation)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p2beta1.TextAnnotation)
  // Shared immutable default instance; all-empty fields.
  private static final com.google.cloud.videointelligence.v1p2beta1.TextAnnotation DEFAULT_INSTANCE;

  static {
    DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1p2beta1.TextAnnotation();
  }

  public static com.google.cloud.videointelligence.v1p2beta1.TextAnnotation getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }

  // Parser implementation: parses through a fresh Builder so partial data is
  // preserved on failure via setUnfinishedMessage; plain IOExceptions are
  // wrapped as InvalidProtocolBufferException.
  private static final com.google.protobuf.Parser<TextAnnotation> PARSER =
      new com.google.protobuf.AbstractParser<TextAnnotation>() {
        @java.lang.Override
        public TextAnnotation parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };

  public static com.google.protobuf.Parser<TextAnnotation> parser() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.protobuf.Parser<TextAnnotation> getParserForType() {
    return PARSER;
  }

  @java.lang.Override
  public com.google.cloud.videointelligence.v1p2beta1.TextAnnotation getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/videointelligence/v1p3beta1/video_intelligence.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.videointelligence.v1p3beta1;
/**
*
*
* <pre>
* Annotations related to one detected OCR text snippet. This will contain the
* corresponding text, confidence value, and frame level information for each
* detection.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p3beta1.TextAnnotation}
*/
public final class TextAnnotation extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.videointelligence.v1p3beta1.TextAnnotation)
TextAnnotationOrBuilder {
private static final long serialVersionUID = 0L;
  // Use TextAnnotation.newBuilder() to construct.
  private TextAnnotation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // No-arg constructor used only for the shared default instance; initializes
  // all fields to their proto3 defaults.
  private TextAnnotation() {
    text_ = "";
    segments_ = java.util.Collections.emptyList();
  }

  // Runtime hook used by the protobuf library to allocate instances
  // reflectively without invoking the public API.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new TextAnnotation();
  }
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p3beta1_TextAnnotation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p3beta1_TextAnnotation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.videointelligence.v1p3beta1.TextAnnotation.class,
com.google.cloud.videointelligence.v1p3beta1.TextAnnotation.Builder.class);
}
  public static final int TEXT_FIELD_NUMBER = 1;

  // Holds either a String or a ByteString; decoded lazily and cached by the
  // getters below (volatile for safe publication of the cached form).
  @SuppressWarnings("serial")
  private volatile java.lang.Object text_ = "";

  /**
   *
   *
   * <pre>
   * The detected text.
   * </pre>
   *
   * <code>string text = 1;</code>
   *
   * @return The text.
   */
  @java.lang.Override
  public java.lang.String getText() {
    java.lang.Object ref = text_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // First String access: decode the ByteString once and cache the result.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      text_ = s;
      return s;
    }
  }

  /**
   *
   *
   * <pre>
   * The detected text.
   * </pre>
   *
   * <code>string text = 1;</code>
   *
   * @return The bytes for text.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getTextBytes() {
    java.lang.Object ref = text_;
    if (ref instanceof java.lang.String) {
      // First bytes access: encode the String once and cache the result.
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      text_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  public static final int SEGMENTS_FIELD_NUMBER = 2;

  // Immutable list of segments; shared directly by the message after build.
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.videointelligence.v1p3beta1.TextSegment> segments_;

  /**
   *
   *
   * <pre>
   * All video segments where OCR detected text appears.
   * </pre>
   *
   * <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.videointelligence.v1p3beta1.TextSegment>
      getSegmentsList() {
    return segments_;
  }

  /**
   *
   *
   * <pre>
   * All video segments where OCR detected text appears.
   * </pre>
   *
   * <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.videointelligence.v1p3beta1.TextSegmentOrBuilder>
      getSegmentsOrBuilderList() {
    return segments_;
  }

  /**
   *
   *
   * <pre>
   * All video segments where OCR detected text appears.
   * </pre>
   *
   * <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
   */
  @java.lang.Override
  public int getSegmentsCount() {
    return segments_.size();
  }

  /**
   *
   *
   * <pre>
   * All video segments where OCR detected text appears.
   * </pre>
   *
   * <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p3beta1.TextSegment getSegments(int index) {
    return segments_.get(index);
  }

  /**
   *
   *
   * <pre>
   * All video segments where OCR detected text appears.
   * </pre>
   *
   * <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
   */
  @java.lang.Override
  public com.google.cloud.videointelligence.v1p3beta1.TextSegmentOrBuilder getSegmentsOrBuilder(
      int index) {
    return segments_.get(index);
  }
  // Memoized initialization state: -1 = not yet computed, 0 = no, 1 = yes.
  private byte memoizedIsInitialized = -1;

  // Proto3 messages have no required fields, so initialization always
  // succeeds; the answer is cached after the first call.
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }

  // Serializes text (field 1), each segment (field 2), and unknown fields in
  // ascending field-number order; default (empty) text is skipped.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(text_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 1, text_);
    }
    for (int i = 0; i < segments_.size(); i++) {
      output.writeMessage(2, segments_.get(i));
    }
    getUnknownFields().writeTo(output);
  }

  // Computes the serialized byte size, mirroring writeTo's field order; the
  // result is memoized in memoizedSize (-1 = unset).
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(text_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, text_);
    }
    for (int i = 0; i < segments_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, segments_.get(i));
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field structural equality over text, segments, and unknown
  // fields; non-TextAnnotation objects fall back to super.equals.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.videointelligence.v1p3beta1.TextAnnotation)) {
      return super.equals(obj);
    }
    com.google.cloud.videointelligence.v1p3beta1.TextAnnotation other =
        (com.google.cloud.videointelligence.v1p3beta1.TextAnnotation) obj;
    if (!getText().equals(other.getText())) return false;
    if (!getSegmentsList().equals(other.getSegmentsList())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }

  // Hash consistent with equals(): folds in the descriptor, each set field
  // tagged by its field number, and unknown fields. Memoized (0 = unset).
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    hash = (37 * hash) + TEXT_FIELD_NUMBER;
    hash = (53 * hash) + getText().hashCode();
    if (getSegmentsCount() > 0) {
      hash = (37 * hash) + SEGMENTS_FIELD_NUMBER;
      hash = (53 * hash) + getSegmentsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // ---------------------------------------------------------------------------
  // Standard generated parse entry points. Byte-oriented overloads delegate to
  // PARSER; stream-based overloads use the GeneratedMessageV3 helpers, which
  // translate parse failures into InvalidProtocolBufferException while
  // propagating plain IOExceptions.
  // ---------------------------------------------------------------------------
  public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variant reads a varint length prefix before the payload.
  public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.videointelligence.v1p3beta1.TextAnnotation prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Annotations related to one detected OCR text snippet. This will contain the
* corresponding text, confidence value, and frame level information for each
* detection.
* </pre>
*
* Protobuf type {@code google.cloud.videointelligence.v1p3beta1.TextAnnotation}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.videointelligence.v1p3beta1.TextAnnotation)
com.google.cloud.videointelligence.v1p3beta1.TextAnnotationOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p3beta1_TextAnnotation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p3beta1_TextAnnotation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.videointelligence.v1p3beta1.TextAnnotation.class,
com.google.cloud.videointelligence.v1p3beta1.TextAnnotation.Builder.class);
}
// Construct using com.google.cloud.videointelligence.v1p3beta1.TextAnnotation.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
text_ = "";
if (segmentsBuilder_ == null) {
segments_ = java.util.Collections.emptyList();
} else {
segments_ = null;
segmentsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000002);
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.videointelligence.v1p3beta1.VideoIntelligenceServiceProto
.internal_static_google_cloud_videointelligence_v1p3beta1_TextAnnotation_descriptor;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p3beta1.TextAnnotation getDefaultInstanceForType() {
return com.google.cloud.videointelligence.v1p3beta1.TextAnnotation.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p3beta1.TextAnnotation build() {
com.google.cloud.videointelligence.v1p3beta1.TextAnnotation result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p3beta1.TextAnnotation buildPartial() {
com.google.cloud.videointelligence.v1p3beta1.TextAnnotation result =
new com.google.cloud.videointelligence.v1p3beta1.TextAnnotation(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.videointelligence.v1p3beta1.TextAnnotation result) {
if (segmentsBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)) {
segments_ = java.util.Collections.unmodifiableList(segments_);
bitField0_ = (bitField0_ & ~0x00000002);
}
result.segments_ = segments_;
} else {
result.segments_ = segmentsBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.videointelligence.v1p3beta1.TextAnnotation result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.text_ = text_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.videointelligence.v1p3beta1.TextAnnotation) {
return mergeFrom((com.google.cloud.videointelligence.v1p3beta1.TextAnnotation) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.videointelligence.v1p3beta1.TextAnnotation other) {
if (other == com.google.cloud.videointelligence.v1p3beta1.TextAnnotation.getDefaultInstance())
return this;
if (!other.getText().isEmpty()) {
text_ = other.text_;
bitField0_ |= 0x00000001;
onChanged();
}
if (segmentsBuilder_ == null) {
if (!other.segments_.isEmpty()) {
if (segments_.isEmpty()) {
segments_ = other.segments_;
bitField0_ = (bitField0_ & ~0x00000002);
} else {
ensureSegmentsIsMutable();
segments_.addAll(other.segments_);
}
onChanged();
}
} else {
if (!other.segments_.isEmpty()) {
if (segmentsBuilder_.isEmpty()) {
segmentsBuilder_.dispose();
segmentsBuilder_ = null;
segments_ = other.segments_;
bitField0_ = (bitField0_ & ~0x00000002);
segmentsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getSegmentsFieldBuilder()
: null;
} else {
segmentsBuilder_.addAllMessages(other.segments_);
}
}
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
text_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
com.google.cloud.videointelligence.v1p3beta1.TextSegment m =
input.readMessage(
com.google.cloud.videointelligence.v1p3beta1.TextSegment.parser(),
extensionRegistry);
if (segmentsBuilder_ == null) {
ensureSegmentsIsMutable();
segments_.add(m);
} else {
segmentsBuilder_.addMessage(m);
}
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object text_ = "";
/**
*
*
* <pre>
* The detected text.
* </pre>
*
* <code>string text = 1;</code>
*
* @return The text.
*/
public java.lang.String getText() {
java.lang.Object ref = text_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
text_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* The detected text.
* </pre>
*
* <code>string text = 1;</code>
*
* @return The bytes for text.
*/
public com.google.protobuf.ByteString getTextBytes() {
java.lang.Object ref = text_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
text_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* The detected text.
* </pre>
*
* <code>string text = 1;</code>
*
* @param value The text to set.
* @return This builder for chaining.
*/
public Builder setText(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
text_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The detected text.
* </pre>
*
* <code>string text = 1;</code>
*
* @return This builder for chaining.
*/
public Builder clearText() {
text_ = getDefaultInstance().getText();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* The detected text.
* </pre>
*
* <code>string text = 1;</code>
*
* @param value The bytes for text to set.
* @return This builder for chaining.
*/
public Builder setTextBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
text_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.util.List<com.google.cloud.videointelligence.v1p3beta1.TextSegment> segments_ =
java.util.Collections.emptyList();
private void ensureSegmentsIsMutable() {
if (!((bitField0_ & 0x00000002) != 0)) {
segments_ =
new java.util.ArrayList<com.google.cloud.videointelligence.v1p3beta1.TextSegment>(
segments_);
bitField0_ |= 0x00000002;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.videointelligence.v1p3beta1.TextSegment,
com.google.cloud.videointelligence.v1p3beta1.TextSegment.Builder,
com.google.cloud.videointelligence.v1p3beta1.TextSegmentOrBuilder>
segmentsBuilder_;
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public java.util.List<com.google.cloud.videointelligence.v1p3beta1.TextSegment>
getSegmentsList() {
if (segmentsBuilder_ == null) {
return java.util.Collections.unmodifiableList(segments_);
} else {
return segmentsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public int getSegmentsCount() {
if (segmentsBuilder_ == null) {
return segments_.size();
} else {
return segmentsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public com.google.cloud.videointelligence.v1p3beta1.TextSegment getSegments(int index) {
if (segmentsBuilder_ == null) {
return segments_.get(index);
} else {
return segmentsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public Builder setSegments(
int index, com.google.cloud.videointelligence.v1p3beta1.TextSegment value) {
if (segmentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSegmentsIsMutable();
segments_.set(index, value);
onChanged();
} else {
segmentsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public Builder setSegments(
int index,
com.google.cloud.videointelligence.v1p3beta1.TextSegment.Builder builderForValue) {
if (segmentsBuilder_ == null) {
ensureSegmentsIsMutable();
segments_.set(index, builderForValue.build());
onChanged();
} else {
segmentsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public Builder addSegments(com.google.cloud.videointelligence.v1p3beta1.TextSegment value) {
if (segmentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSegmentsIsMutable();
segments_.add(value);
onChanged();
} else {
segmentsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public Builder addSegments(
int index, com.google.cloud.videointelligence.v1p3beta1.TextSegment value) {
if (segmentsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureSegmentsIsMutable();
segments_.add(index, value);
onChanged();
} else {
segmentsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public Builder addSegments(
com.google.cloud.videointelligence.v1p3beta1.TextSegment.Builder builderForValue) {
if (segmentsBuilder_ == null) {
ensureSegmentsIsMutable();
segments_.add(builderForValue.build());
onChanged();
} else {
segmentsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public Builder addSegments(
int index,
com.google.cloud.videointelligence.v1p3beta1.TextSegment.Builder builderForValue) {
if (segmentsBuilder_ == null) {
ensureSegmentsIsMutable();
segments_.add(index, builderForValue.build());
onChanged();
} else {
segmentsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public Builder addAllSegments(
java.lang.Iterable<? extends com.google.cloud.videointelligence.v1p3beta1.TextSegment>
values) {
if (segmentsBuilder_ == null) {
ensureSegmentsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, segments_);
onChanged();
} else {
segmentsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public Builder clearSegments() {
if (segmentsBuilder_ == null) {
segments_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
} else {
segmentsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public Builder removeSegments(int index) {
if (segmentsBuilder_ == null) {
ensureSegmentsIsMutable();
segments_.remove(index);
onChanged();
} else {
segmentsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public com.google.cloud.videointelligence.v1p3beta1.TextSegment.Builder getSegmentsBuilder(
int index) {
return getSegmentsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public com.google.cloud.videointelligence.v1p3beta1.TextSegmentOrBuilder getSegmentsOrBuilder(
int index) {
if (segmentsBuilder_ == null) {
return segments_.get(index);
} else {
return segmentsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public java.util.List<
? extends com.google.cloud.videointelligence.v1p3beta1.TextSegmentOrBuilder>
getSegmentsOrBuilderList() {
if (segmentsBuilder_ != null) {
return segmentsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(segments_);
}
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public com.google.cloud.videointelligence.v1p3beta1.TextSegment.Builder addSegmentsBuilder() {
return getSegmentsFieldBuilder()
.addBuilder(
com.google.cloud.videointelligence.v1p3beta1.TextSegment.getDefaultInstance());
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public com.google.cloud.videointelligence.v1p3beta1.TextSegment.Builder addSegmentsBuilder(
int index) {
return getSegmentsFieldBuilder()
.addBuilder(
index, com.google.cloud.videointelligence.v1p3beta1.TextSegment.getDefaultInstance());
}
/**
*
*
* <pre>
* All video segments where OCR detected text appears.
* </pre>
*
* <code>repeated .google.cloud.videointelligence.v1p3beta1.TextSegment segments = 2;</code>
*/
public java.util.List<com.google.cloud.videointelligence.v1p3beta1.TextSegment.Builder>
getSegmentsBuilderList() {
return getSegmentsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.videointelligence.v1p3beta1.TextSegment,
com.google.cloud.videointelligence.v1p3beta1.TextSegment.Builder,
com.google.cloud.videointelligence.v1p3beta1.TextSegmentOrBuilder>
getSegmentsFieldBuilder() {
if (segmentsBuilder_ == null) {
segmentsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.videointelligence.v1p3beta1.TextSegment,
com.google.cloud.videointelligence.v1p3beta1.TextSegment.Builder,
com.google.cloud.videointelligence.v1p3beta1.TextSegmentOrBuilder>(
segments_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean());
segments_ = null;
}
return segmentsBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.videointelligence.v1p3beta1.TextAnnotation)
}
// @@protoc_insertion_point(class_scope:google.cloud.videointelligence.v1p3beta1.TextAnnotation)
private static final com.google.cloud.videointelligence.v1p3beta1.TextAnnotation DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.videointelligence.v1p3beta1.TextAnnotation();
}
public static com.google.cloud.videointelligence.v1p3beta1.TextAnnotation getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<TextAnnotation> PARSER =
new com.google.protobuf.AbstractParser<TextAnnotation>() {
@java.lang.Override
public TextAnnotation parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<TextAnnotation> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<TextAnnotation> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.videointelligence.v1p3beta1.TextAnnotation getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,978 | java-managed-identities/proto-google-cloud-managed-identities-v1/src/main/java/com/google/cloud/managedidentities/v1/UpdateDomainRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/managedidentities/v1/managed_identities_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.managedidentities.v1;
/**
*
*
* <pre>
* Request message for
* [UpdateDomain][google.cloud.managedidentities.v1.UpdateDomain]
* </pre>
*
* Protobuf type {@code google.cloud.managedidentities.v1.UpdateDomainRequest}
*/
public final class UpdateDomainRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.managedidentities.v1.UpdateDomainRequest)
UpdateDomainRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use UpdateDomainRequest.newBuilder() to construct.
private UpdateDomainRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private UpdateDomainRequest() {}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new UpdateDomainRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.managedidentities.v1.ManagedIdentitiesServiceProto
.internal_static_google_cloud_managedidentities_v1_UpdateDomainRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.managedidentities.v1.ManagedIdentitiesServiceProto
.internal_static_google_cloud_managedidentities_v1_UpdateDomainRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.managedidentities.v1.UpdateDomainRequest.class,
com.google.cloud.managedidentities.v1.UpdateDomainRequest.Builder.class);
}
private int bitField0_;
public static final int UPDATE_MASK_FIELD_NUMBER = 1;
private com.google.protobuf.FieldMask updateMask_;
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in this
* field. The elements of the repeated paths field may only include
* fields from [Domain][google.cloud.managedidentities.v1.Domain]:
* * `labels`
* * `locations`
* * `authorized_networks`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
@java.lang.Override
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in this
* field. The elements of the repeated paths field may only include
* fields from [Domain][google.cloud.managedidentities.v1.Domain]:
* * `labels`
* * `locations`
* * `authorized_networks`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
@java.lang.Override
public com.google.protobuf.FieldMask getUpdateMask() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in this
* field. The elements of the repeated paths field may only include
* fields from [Domain][google.cloud.managedidentities.v1.Domain]:
* * `labels`
* * `locations`
* * `authorized_networks`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_;
}
public static final int DOMAIN_FIELD_NUMBER = 2;
private com.google.cloud.managedidentities.v1.Domain domain_;
/**
*
*
* <pre>
* Required. Domain message with updated fields. Only supported fields specified in
* update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.managedidentities.v1.Domain domain = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the domain field is set.
*/
@java.lang.Override
public boolean hasDomain() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Domain message with updated fields. Only supported fields specified in
* update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.managedidentities.v1.Domain domain = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The domain.
*/
@java.lang.Override
public com.google.cloud.managedidentities.v1.Domain getDomain() {
return domain_ == null
? com.google.cloud.managedidentities.v1.Domain.getDefaultInstance()
: domain_;
}
/**
*
*
* <pre>
* Required. Domain message with updated fields. Only supported fields specified in
* update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.managedidentities.v1.Domain domain = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.managedidentities.v1.DomainOrBuilder getDomainOrBuilder() {
return domain_ == null
? com.google.cloud.managedidentities.v1.Domain.getDefaultInstance()
: domain_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getDomain());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getUpdateMask());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getDomain());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.managedidentities.v1.UpdateDomainRequest)) {
return super.equals(obj);
}
com.google.cloud.managedidentities.v1.UpdateDomainRequest other =
(com.google.cloud.managedidentities.v1.UpdateDomainRequest) obj;
if (hasUpdateMask() != other.hasUpdateMask()) return false;
if (hasUpdateMask()) {
if (!getUpdateMask().equals(other.getUpdateMask())) return false;
}
if (hasDomain() != other.hasDomain()) return false;
if (hasDomain()) {
if (!getDomain().equals(other.getDomain())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasUpdateMask()) {
hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER;
hash = (53 * hash) + getUpdateMask().hashCode();
}
if (hasDomain()) {
hash = (37 * hash) + DOMAIN_FIELD_NUMBER;
hash = (53 * hash) + getDomain().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// ---------------------------------------------------------------------------
// Standard generated parse entry points. The byte[]/ByteString/ByteBuffer
// overloads delegate to PARSER directly; the stream overloads go through
// GeneratedMessageV3 helpers so IOExceptions propagate unwrapped.
// ---------------------------------------------------------------------------
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload.
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}
// Fresh builder seeded from the default instance (all fields unset).
public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated with a copy of {@code prototype}'s fields.
public static Builder newBuilder(
    com.google.cloud.managedidentities.v1.UpdateDomainRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
  // Avoid a needless mergeFrom when this is the (empty) default instance.
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
*
*
* <pre>
* Request message for
* [UpdateDomain][google.cloud.managedidentities.v1.UpdateDomain]
* </pre>
*
* Protobuf type {@code google.cloud.managedidentities.v1.UpdateDomainRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.managedidentities.v1.UpdateDomainRequest)
com.google.cloud.managedidentities.v1.UpdateDomainRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.managedidentities.v1.ManagedIdentitiesServiceProto
.internal_static_google_cloud_managedidentities_v1_UpdateDomainRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.managedidentities.v1.ManagedIdentitiesServiceProto
.internal_static_google_cloud_managedidentities_v1_UpdateDomainRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.managedidentities.v1.UpdateDomainRequest.class,
com.google.cloud.managedidentities.v1.UpdateDomainRequest.Builder.class);
}
// Construct using com.google.cloud.managedidentities.v1.UpdateDomainRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getUpdateMaskFieldBuilder();
getDomainFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
domain_ = null;
if (domainBuilder_ != null) {
domainBuilder_.dispose();
domainBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.managedidentities.v1.ManagedIdentitiesServiceProto
.internal_static_google_cloud_managedidentities_v1_UpdateDomainRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.managedidentities.v1.UpdateDomainRequest getDefaultInstanceForType() {
return com.google.cloud.managedidentities.v1.UpdateDomainRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.managedidentities.v1.UpdateDomainRequest build() {
com.google.cloud.managedidentities.v1.UpdateDomainRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.managedidentities.v1.UpdateDomainRequest buildPartial() {
com.google.cloud.managedidentities.v1.UpdateDomainRequest result =
new com.google.cloud.managedidentities.v1.UpdateDomainRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies into {@code result} only the fields whose presence bits are set on this
// Builder, preferring the nested field builder's built value when one exists,
// and mirrors the presence bits onto the message.
private void buildPartial0(com.google.cloud.managedidentities.v1.UpdateDomainRequest result) {
  int from_bitField0_ = bitField0_;
  int to_bitField0_ = 0;
  if (((from_bitField0_ & 0x00000001) != 0)) { // bit 0: update_mask
    result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build();
    to_bitField0_ |= 0x00000001;
  }
  if (((from_bitField0_ & 0x00000002) != 0)) { // bit 1: domain
    result.domain_ = domainBuilder_ == null ? domain_ : domainBuilder_.build();
    to_bitField0_ |= 0x00000002;
  }
  result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.managedidentities.v1.UpdateDomainRequest) {
return mergeFrom((com.google.cloud.managedidentities.v1.UpdateDomainRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges the set fields of {@code other} into this builder (proto merge
// semantics: submessages are recursively merged, not replaced wholesale).
public Builder mergeFrom(com.google.cloud.managedidentities.v1.UpdateDomainRequest other) {
  // Merging the default instance is a no-op.
  if (other == com.google.cloud.managedidentities.v1.UpdateDomainRequest.getDefaultInstance())
    return this;
  if (other.hasUpdateMask()) {
    mergeUpdateMask(other.getUpdateMask());
  }
  if (other.hasDomain()) {
    mergeDomain(other.getDomain());
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  // Wire-format merge loop. Tag values: 10 = (field 1 << 3 | LEN) update_mask,
  // 18 = (field 2 << 3 | LEN) domain; tag 0 marks end of input.
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          done = true;
          break;
        case 10:
          {
            input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000001;
            break;
          } // case 10
        case 18:
          {
            input.readMessage(getDomainFieldBuilder().getBuilder(), extensionRegistry);
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          {
            // Unrecognized tags are preserved in the unknown-field set.
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parents even on failure so partially-read state is observable.
    onChanged();
  } // finally
  return this;
}
private int bitField0_;
private com.google.protobuf.FieldMask updateMask_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
updateMaskBuilder_;
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in this
* field. The elements of the repeated paths field may only include
* fields from [Domain][google.cloud.managedidentities.v1.Domain]:
* * `labels`
* * `locations`
* * `authorized_networks`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the updateMask field is set.
*/
public boolean hasUpdateMask() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in this
* field. The elements of the repeated paths field may only include
* fields from [Domain][google.cloud.managedidentities.v1.Domain]:
* * `labels`
* * `locations`
* * `authorized_networks`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The updateMask.
*/
public com.google.protobuf.FieldMask getUpdateMask() {
if (updateMaskBuilder_ == null) {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
} else {
return updateMaskBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in this
* field. The elements of the repeated paths field may only include
* fields from [Domain][google.cloud.managedidentities.v1.Domain]:
* * `labels`
* * `locations`
* * `authorized_networks`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask value) {
if (updateMaskBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateMask_ = value;
} else {
updateMaskBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in this
* field. The elements of the repeated paths field may only include
* fields from [Domain][google.cloud.managedidentities.v1.Domain]:
* * `labels`
* * `locations`
* * `authorized_networks`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) {
if (updateMaskBuilder_ == null) {
updateMask_ = builderForValue.build();
} else {
updateMaskBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in this
* field. The elements of the repeated paths field may only include
* fields from [Domain][google.cloud.managedidentities.v1.Domain]:
* * `labels`
* * `locations`
* * `authorized_networks`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
/**
 * Merges {@code value} into the update_mask field using proto merge semantics:
 * if a non-default mask is already present it is merged field-by-field via the
 * nested builder; otherwise {@code value} simply replaces the current mask.
 */
public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) {
  if (updateMaskBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)
        && updateMask_ != null
        && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) {
      getUpdateMaskBuilder().mergeFrom(value);
    } else {
      updateMask_ = value;
    }
  } else {
    updateMaskBuilder_.mergeFrom(value);
  }
  // Only mark the field present (and fire change notifications) if something is set.
  if (updateMask_ != null) {
    bitField0_ |= 0x00000001;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in this
* field. The elements of the repeated paths field may only include
* fields from [Domain][google.cloud.managedidentities.v1.Domain]:
* * `labels`
* * `locations`
* * `authorized_networks`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearUpdateMask() {
bitField0_ = (bitField0_ & ~0x00000001);
updateMask_ = null;
if (updateMaskBuilder_ != null) {
updateMaskBuilder_.dispose();
updateMaskBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in this
* field. The elements of the repeated paths field may only include
* fields from [Domain][google.cloud.managedidentities.v1.Domain]:
* * `labels`
* * `locations`
* * `authorized_networks`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getUpdateMaskFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in this
* field. The elements of the repeated paths field may only include
* fields from [Domain][google.cloud.managedidentities.v1.Domain]:
* * `labels`
* * `locations`
* * `authorized_networks`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() {
if (updateMaskBuilder_ != null) {
return updateMaskBuilder_.getMessageOrBuilder();
} else {
return updateMask_ == null
? com.google.protobuf.FieldMask.getDefaultInstance()
: updateMask_;
}
}
/**
*
*
* <pre>
* Required. Mask of fields to update. At least one path must be supplied in this
* field. The elements of the repeated paths field may only include
* fields from [Domain][google.cloud.managedidentities.v1.Domain]:
* * `labels`
* * `locations`
* * `authorized_networks`
* </pre>
*
* <code>.google.protobuf.FieldMask update_mask = 1 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>
getUpdateMaskFieldBuilder() {
if (updateMaskBuilder_ == null) {
updateMaskBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.FieldMask,
com.google.protobuf.FieldMask.Builder,
com.google.protobuf.FieldMaskOrBuilder>(
getUpdateMask(), getParentForChildren(), isClean());
updateMask_ = null;
}
return updateMaskBuilder_;
}
private com.google.cloud.managedidentities.v1.Domain domain_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.managedidentities.v1.Domain,
com.google.cloud.managedidentities.v1.Domain.Builder,
com.google.cloud.managedidentities.v1.DomainOrBuilder>
domainBuilder_;
/**
*
*
* <pre>
* Required. Domain message with updated fields. Only supported fields specified in
* update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.managedidentities.v1.Domain domain = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the domain field is set.
*/
public boolean hasDomain() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Required. Domain message with updated fields. Only supported fields specified in
* update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.managedidentities.v1.Domain domain = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The domain.
*/
public com.google.cloud.managedidentities.v1.Domain getDomain() {
if (domainBuilder_ == null) {
return domain_ == null
? com.google.cloud.managedidentities.v1.Domain.getDefaultInstance()
: domain_;
} else {
return domainBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. Domain message with updated fields. Only supported fields specified in
* update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.managedidentities.v1.Domain domain = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDomain(com.google.cloud.managedidentities.v1.Domain value) {
if (domainBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
domain_ = value;
} else {
domainBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Domain message with updated fields. Only supported fields specified in
* update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.managedidentities.v1.Domain domain = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setDomain(com.google.cloud.managedidentities.v1.Domain.Builder builderForValue) {
if (domainBuilder_ == null) {
domain_ = builderForValue.build();
} else {
domainBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Domain message with updated fields. Only supported fields specified in
* update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.managedidentities.v1.Domain domain = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
/**
 * Merges {@code value} into the domain field using proto merge semantics:
 * if a non-default Domain is already present it is merged via the nested
 * builder; otherwise {@code value} replaces the current domain.
 */
public Builder mergeDomain(com.google.cloud.managedidentities.v1.Domain value) {
  if (domainBuilder_ == null) {
    if (((bitField0_ & 0x00000002) != 0)
        && domain_ != null
        && domain_ != com.google.cloud.managedidentities.v1.Domain.getDefaultInstance()) {
      getDomainBuilder().mergeFrom(value);
    } else {
      domain_ = value;
    }
  } else {
    domainBuilder_.mergeFrom(value);
  }
  // Only mark the field present (and fire change notifications) if something is set.
  if (domain_ != null) {
    bitField0_ |= 0x00000002;
    onChanged();
  }
  return this;
}
/**
*
*
* <pre>
* Required. Domain message with updated fields. Only supported fields specified in
* update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.managedidentities.v1.Domain domain = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearDomain() {
bitField0_ = (bitField0_ & ~0x00000002);
domain_ = null;
if (domainBuilder_ != null) {
domainBuilder_.dispose();
domainBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. Domain message with updated fields. Only supported fields specified in
* update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.managedidentities.v1.Domain domain = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.managedidentities.v1.Domain.Builder getDomainBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getDomainFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. Domain message with updated fields. Only supported fields specified in
* update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.managedidentities.v1.Domain domain = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.managedidentities.v1.DomainOrBuilder getDomainOrBuilder() {
if (domainBuilder_ != null) {
return domainBuilder_.getMessageOrBuilder();
} else {
return domain_ == null
? com.google.cloud.managedidentities.v1.Domain.getDefaultInstance()
: domain_;
}
}
/**
*
*
* <pre>
* Required. Domain message with updated fields. Only supported fields specified in
* update_mask are updated.
* </pre>
*
* <code>
* .google.cloud.managedidentities.v1.Domain domain = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.managedidentities.v1.Domain,
com.google.cloud.managedidentities.v1.Domain.Builder,
com.google.cloud.managedidentities.v1.DomainOrBuilder>
getDomainFieldBuilder() {
if (domainBuilder_ == null) {
domainBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.managedidentities.v1.Domain,
com.google.cloud.managedidentities.v1.Domain.Builder,
com.google.cloud.managedidentities.v1.DomainOrBuilder>(
getDomain(), getParentForChildren(), isClean());
domain_ = null;
}
return domainBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.managedidentities.v1.UpdateDomainRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.managedidentities.v1.UpdateDomainRequest)
private static final com.google.cloud.managedidentities.v1.UpdateDomainRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.managedidentities.v1.UpdateDomainRequest();
}
public static com.google.cloud.managedidentities.v1.UpdateDomainRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser instance. Parse errors carry the partially-built message via
// setUnfinishedMessage so callers can inspect what was read before the failure.
private static final com.google.protobuf.Parser<UpdateDomainRequest> PARSER =
    new com.google.protobuf.AbstractParser<UpdateDomainRequest>() {
      @java.lang.Override
      public UpdateDomainRequest parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          // Convert to the checked protobuf exception type expected by callers.
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
public static com.google.protobuf.Parser<UpdateDomainRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<UpdateDomainRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.managedidentities.v1.UpdateDomainRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,914 | java-filestore/proto-google-cloud-filestore-v1beta1/src/main/java/com/google/cloud/filestore/v1beta1/CreateShareRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/filestore/v1beta1/cloud_filestore_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.filestore.v1beta1;
/**
*
*
* <pre>
* CreateShareRequest creates a share.
* </pre>
*
* Protobuf type {@code google.cloud.filestore.v1beta1.CreateShareRequest}
*/
public final class CreateShareRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.filestore.v1beta1.CreateShareRequest)
CreateShareRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateShareRequest.newBuilder() to construct.
private CreateShareRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private CreateShareRequest() {
parent_ = "";
shareId_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateShareRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_CreateShareRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_CreateShareRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.filestore.v1beta1.CreateShareRequest.class,
com.google.cloud.filestore.v1beta1.CreateShareRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The Filestore Instance to create the share for, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
  // Lazy UTF-8 decode: parent_ may hold a ByteString straight off the wire.
  // The decoded String is cached back into the field for subsequent calls.
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Required. The Filestore Instance to create the share for, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SHARE_ID_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object shareId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the share.
* The ID must be unique within the specified instance.
*
* This value must start with a lowercase letter followed by up to 62
* lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
* </pre>
*
* <code>string share_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The shareId.
*/
@java.lang.Override
public java.lang.String getShareId() {
java.lang.Object ref = shareId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
shareId_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the share.
* The ID must be unique within the specified instance.
*
* This value must start with a lowercase letter followed by up to 62
* lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
* </pre>
*
* <code>string share_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for shareId.
*/
@java.lang.Override
public com.google.protobuf.ByteString getShareIdBytes() {
java.lang.Object ref = shareId_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
shareId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int SHARE_FIELD_NUMBER = 3;
private com.google.cloud.filestore.v1beta1.Share share_;
/**
*
*
* <pre>
* Required. A share resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the share field is set.
*/
@java.lang.Override
public boolean hasShare() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. A share resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The share.
*/
@java.lang.Override
public com.google.cloud.filestore.v1beta1.Share getShare() {
return share_ == null ? com.google.cloud.filestore.v1beta1.Share.getDefaultInstance() : share_;
}
/**
*
*
* <pre>
* Required. A share resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.filestore.v1beta1.ShareOrBuilder getShareOrBuilder() {
return share_ == null ? com.google.cloud.filestore.v1beta1.Share.getDefaultInstance() : share_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(shareId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, shareId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getShare());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(shareId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, shareId_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getShare());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.filestore.v1beta1.CreateShareRequest)) {
return super.equals(obj);
}
com.google.cloud.filestore.v1beta1.CreateShareRequest other =
(com.google.cloud.filestore.v1beta1.CreateShareRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (!getShareId().equals(other.getShareId())) return false;
if (hasShare() != other.hasShare()) return false;
if (hasShare()) {
if (!getShare().equals(other.getShare())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
hash = (37 * hash) + SHARE_ID_FIELD_NUMBER;
hash = (53 * hash) + getShareId().hashCode();
if (hasShare()) {
hash = (37 * hash) + SHARE_FIELD_NUMBER;
hash = (53 * hash) + getShare().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.filestore.v1beta1.CreateShareRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* CreateShareRequest creates a share.
* </pre>
*
* Protobuf type {@code google.cloud.filestore.v1beta1.CreateShareRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.filestore.v1beta1.CreateShareRequest)
com.google.cloud.filestore.v1beta1.CreateShareRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_CreateShareRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_CreateShareRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.filestore.v1beta1.CreateShareRequest.class,
com.google.cloud.filestore.v1beta1.CreateShareRequest.Builder.class);
}
// Construct using com.google.cloud.filestore.v1beta1.CreateShareRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getShareFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
shareId_ = "";
share_ = null;
if (shareBuilder_ != null) {
shareBuilder_.dispose();
shareBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.filestore.v1beta1.CloudFilestoreServiceProto
.internal_static_google_cloud_filestore_v1beta1_CreateShareRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.filestore.v1beta1.CreateShareRequest getDefaultInstanceForType() {
return com.google.cloud.filestore.v1beta1.CreateShareRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.filestore.v1beta1.CreateShareRequest build() {
com.google.cloud.filestore.v1beta1.CreateShareRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.filestore.v1beta1.CreateShareRequest buildPartial() {
com.google.cloud.filestore.v1beta1.CreateShareRequest result =
new com.google.cloud.filestore.v1beta1.CreateShareRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.cloud.filestore.v1beta1.CreateShareRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.shareId_ = shareId_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000004) != 0)) {
result.share_ = shareBuilder_ == null ? share_ : shareBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.filestore.v1beta1.CreateShareRequest) {
return mergeFrom((com.google.cloud.filestore.v1beta1.CreateShareRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.filestore.v1beta1.CreateShareRequest other) {
if (other == com.google.cloud.filestore.v1beta1.CreateShareRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (!other.getShareId().isEmpty()) {
shareId_ = other.shareId_;
bitField0_ |= 0x00000002;
onChanged();
}
if (other.hasShare()) {
mergeShare(other.getShare());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
shareId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
input.readMessage(getShareFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The Filestore Instance to create the share for, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The Filestore Instance to create the share for, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The Filestore Instance to create the share for, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The parent to set.
* @return This builder for chaining.
*/
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Filestore Instance to create the share for, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The Filestore Instance to create the share for, in the format
* `projects/{project_id}/locations/{location}/instances/{instance_id}`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for parent to set.
* @return This builder for chaining.
*/
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private java.lang.Object shareId_ = "";
/**
*
*
* <pre>
* Required. The ID to use for the share.
* The ID must be unique within the specified instance.
*
* This value must start with a lowercase letter followed by up to 62
* lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
* </pre>
*
* <code>string share_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The shareId.
*/
public java.lang.String getShareId() {
java.lang.Object ref = shareId_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
shareId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the share.
* The ID must be unique within the specified instance.
*
* This value must start with a lowercase letter followed by up to 62
* lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
* </pre>
*
* <code>string share_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return The bytes for shareId.
*/
public com.google.protobuf.ByteString getShareIdBytes() {
java.lang.Object ref = shareId_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
shareId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The ID to use for the share.
* The ID must be unique within the specified instance.
*
* This value must start with a lowercase letter followed by up to 62
* lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
* </pre>
*
* <code>string share_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The shareId to set.
* @return This builder for chaining.
*/
public Builder setShareId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
shareId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the share.
* The ID must be unique within the specified instance.
*
* This value must start with a lowercase letter followed by up to 62
* lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
* </pre>
*
* <code>string share_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @return This builder for chaining.
*/
public Builder clearShareId() {
shareId_ = getDefaultInstance().getShareId();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The ID to use for the share.
* The ID must be unique within the specified instance.
*
* This value must start with a lowercase letter followed by up to 62
* lowercase letters, numbers, or hyphens, and cannot end with a hyphen.
* </pre>
*
* <code>string share_id = 2 [(.google.api.field_behavior) = REQUIRED];</code>
*
* @param value The bytes for shareId to set.
* @return This builder for chaining.
*/
public Builder setShareIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
shareId_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
private com.google.cloud.filestore.v1beta1.Share share_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.filestore.v1beta1.Share,
com.google.cloud.filestore.v1beta1.Share.Builder,
com.google.cloud.filestore.v1beta1.ShareOrBuilder>
shareBuilder_;
/**
*
*
* <pre>
* Required. A share resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the share field is set.
*/
public boolean hasShare() {
return ((bitField0_ & 0x00000004) != 0);
}
/**
*
*
* <pre>
* Required. A share resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The share.
*/
public com.google.cloud.filestore.v1beta1.Share getShare() {
if (shareBuilder_ == null) {
return share_ == null
? com.google.cloud.filestore.v1beta1.Share.getDefaultInstance()
: share_;
} else {
return shareBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Required. A share resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setShare(com.google.cloud.filestore.v1beta1.Share value) {
if (shareBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
share_ = value;
} else {
shareBuilder_.setMessage(value);
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A share resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder setShare(com.google.cloud.filestore.v1beta1.Share.Builder builderForValue) {
if (shareBuilder_ == null) {
share_ = builderForValue.build();
} else {
shareBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A share resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder mergeShare(com.google.cloud.filestore.v1beta1.Share value) {
if (shareBuilder_ == null) {
if (((bitField0_ & 0x00000004) != 0)
&& share_ != null
&& share_ != com.google.cloud.filestore.v1beta1.Share.getDefaultInstance()) {
getShareBuilder().mergeFrom(value);
} else {
share_ = value;
}
} else {
shareBuilder_.mergeFrom(value);
}
if (share_ != null) {
bitField0_ |= 0x00000004;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Required. A share resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public Builder clearShare() {
bitField0_ = (bitField0_ & ~0x00000004);
share_ = null;
if (shareBuilder_ != null) {
shareBuilder_.dispose();
shareBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. A share resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.filestore.v1beta1.Share.Builder getShareBuilder() {
bitField0_ |= 0x00000004;
onChanged();
return getShareFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Required. A share resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
public com.google.cloud.filestore.v1beta1.ShareOrBuilder getShareOrBuilder() {
if (shareBuilder_ != null) {
return shareBuilder_.getMessageOrBuilder();
} else {
return share_ == null
? com.google.cloud.filestore.v1beta1.Share.getDefaultInstance()
: share_;
}
}
/**
*
*
* <pre>
* Required. A share resource
* </pre>
*
* <code>
* .google.cloud.filestore.v1beta1.Share share = 3 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.filestore.v1beta1.Share,
com.google.cloud.filestore.v1beta1.Share.Builder,
com.google.cloud.filestore.v1beta1.ShareOrBuilder>
getShareFieldBuilder() {
if (shareBuilder_ == null) {
shareBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.filestore.v1beta1.Share,
com.google.cloud.filestore.v1beta1.Share.Builder,
com.google.cloud.filestore.v1beta1.ShareOrBuilder>(
getShare(), getParentForChildren(), isClean());
share_ = null;
}
return shareBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.filestore.v1beta1.CreateShareRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.filestore.v1beta1.CreateShareRequest)
private static final com.google.cloud.filestore.v1beta1.CreateShareRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.filestore.v1beta1.CreateShareRequest();
}
public static com.google.cloud.filestore.v1beta1.CreateShareRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<CreateShareRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateShareRequest>() {
@java.lang.Override
public CreateShareRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateShareRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateShareRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.filestore.v1beta1.CreateShareRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
// ==== NOTE(review): concatenation boundary — the content below is a different
// generated file: googleapis/google-cloud-java,
// java-shopping-css/proto-google-shopping-css-v1/src/main/java/com/google/shopping/css/v1/ListAccountLabelsResponse.java.
// The original separator here was raw repo metadata ("|", repo id, size, path),
// which is not valid Java; preserved as a comment so the text remains well-formed.
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/css/v1/accounts_labels.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.css.v1;
/**
*
*
* <pre>
* Response message for the `ListAccountLabels` method.
* </pre>
*
* Protobuf type {@code google.shopping.css.v1.ListAccountLabelsResponse}
*/
public final class ListAccountLabelsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.css.v1.ListAccountLabelsResponse)
ListAccountLabelsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListAccountLabelsResponse.newBuilder() to construct.
private ListAccountLabelsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListAccountLabelsResponse() {
accountLabels_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListAccountLabelsResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.css.v1.AccountsLabelsProto
.internal_static_google_shopping_css_v1_ListAccountLabelsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.css.v1.AccountsLabelsProto
.internal_static_google_shopping_css_v1_ListAccountLabelsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.css.v1.ListAccountLabelsResponse.class,
com.google.shopping.css.v1.ListAccountLabelsResponse.Builder.class);
}
public static final int ACCOUNT_LABELS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.shopping.css.v1.AccountLabel> accountLabels_;
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.shopping.css.v1.AccountLabel> getAccountLabelsList() {
return accountLabels_;
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.shopping.css.v1.AccountLabelOrBuilder>
getAccountLabelsOrBuilderList() {
return accountLabels_;
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
@java.lang.Override
public int getAccountLabelsCount() {
return accountLabels_.size();
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
@java.lang.Override
public com.google.shopping.css.v1.AccountLabel getAccountLabels(int index) {
return accountLabels_.get(index);
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
@java.lang.Override
public com.google.shopping.css.v1.AccountLabelOrBuilder getAccountLabelsOrBuilder(int index) {
return accountLabels_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < accountLabels_.size(); i++) {
output.writeMessage(1, accountLabels_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < accountLabels_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, accountLabels_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.shopping.css.v1.ListAccountLabelsResponse)) {
return super.equals(obj);
}
com.google.shopping.css.v1.ListAccountLabelsResponse other =
(com.google.shopping.css.v1.ListAccountLabelsResponse) obj;
if (!getAccountLabelsList().equals(other.getAccountLabelsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getAccountLabelsCount() > 0) {
hash = (37 * hash) + ACCOUNT_LABELS_FIELD_NUMBER;
hash = (53 * hash) + getAccountLabelsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.shopping.css.v1.ListAccountLabelsResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for the `ListAccountLabels` method.
* </pre>
*
* Protobuf type {@code google.shopping.css.v1.ListAccountLabelsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.css.v1.ListAccountLabelsResponse)
com.google.shopping.css.v1.ListAccountLabelsResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.css.v1.AccountsLabelsProto
.internal_static_google_shopping_css_v1_ListAccountLabelsResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.css.v1.AccountsLabelsProto
.internal_static_google_shopping_css_v1_ListAccountLabelsResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.css.v1.ListAccountLabelsResponse.class,
com.google.shopping.css.v1.ListAccountLabelsResponse.Builder.class);
}
// Construct using com.google.shopping.css.v1.ListAccountLabelsResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (accountLabelsBuilder_ == null) {
accountLabels_ = java.util.Collections.emptyList();
} else {
accountLabels_ = null;
accountLabelsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.shopping.css.v1.AccountsLabelsProto
.internal_static_google_shopping_css_v1_ListAccountLabelsResponse_descriptor;
}
@java.lang.Override
public com.google.shopping.css.v1.ListAccountLabelsResponse getDefaultInstanceForType() {
return com.google.shopping.css.v1.ListAccountLabelsResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.shopping.css.v1.ListAccountLabelsResponse build() {
com.google.shopping.css.v1.ListAccountLabelsResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.shopping.css.v1.ListAccountLabelsResponse buildPartial() {
com.google.shopping.css.v1.ListAccountLabelsResponse result =
new com.google.shopping.css.v1.ListAccountLabelsResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.shopping.css.v1.ListAccountLabelsResponse result) {
if (accountLabelsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
accountLabels_ = java.util.Collections.unmodifiableList(accountLabels_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.accountLabels_ = accountLabels_;
} else {
result.accountLabels_ = accountLabelsBuilder_.build();
}
}
private void buildPartial0(com.google.shopping.css.v1.ListAccountLabelsResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.shopping.css.v1.ListAccountLabelsResponse) {
return mergeFrom((com.google.shopping.css.v1.ListAccountLabelsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.shopping.css.v1.ListAccountLabelsResponse other) {
if (other == com.google.shopping.css.v1.ListAccountLabelsResponse.getDefaultInstance())
return this;
if (accountLabelsBuilder_ == null) {
if (!other.accountLabels_.isEmpty()) {
if (accountLabels_.isEmpty()) {
accountLabels_ = other.accountLabels_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureAccountLabelsIsMutable();
accountLabels_.addAll(other.accountLabels_);
}
onChanged();
}
} else {
if (!other.accountLabels_.isEmpty()) {
if (accountLabelsBuilder_.isEmpty()) {
accountLabelsBuilder_.dispose();
accountLabelsBuilder_ = null;
accountLabels_ = other.accountLabels_;
bitField0_ = (bitField0_ & ~0x00000001);
accountLabelsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getAccountLabelsFieldBuilder()
: null;
} else {
accountLabelsBuilder_.addAllMessages(other.accountLabels_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.shopping.css.v1.AccountLabel m =
input.readMessage(
com.google.shopping.css.v1.AccountLabel.parser(), extensionRegistry);
if (accountLabelsBuilder_ == null) {
ensureAccountLabelsIsMutable();
accountLabels_.add(m);
} else {
accountLabelsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.shopping.css.v1.AccountLabel> accountLabels_ =
java.util.Collections.emptyList();
private void ensureAccountLabelsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
accountLabels_ =
new java.util.ArrayList<com.google.shopping.css.v1.AccountLabel>(accountLabels_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.css.v1.AccountLabel,
com.google.shopping.css.v1.AccountLabel.Builder,
com.google.shopping.css.v1.AccountLabelOrBuilder>
accountLabelsBuilder_;
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public java.util.List<com.google.shopping.css.v1.AccountLabel> getAccountLabelsList() {
if (accountLabelsBuilder_ == null) {
return java.util.Collections.unmodifiableList(accountLabels_);
} else {
return accountLabelsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public int getAccountLabelsCount() {
if (accountLabelsBuilder_ == null) {
return accountLabels_.size();
} else {
return accountLabelsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public com.google.shopping.css.v1.AccountLabel getAccountLabels(int index) {
if (accountLabelsBuilder_ == null) {
return accountLabels_.get(index);
} else {
return accountLabelsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public Builder setAccountLabels(int index, com.google.shopping.css.v1.AccountLabel value) {
if (accountLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAccountLabelsIsMutable();
accountLabels_.set(index, value);
onChanged();
} else {
accountLabelsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public Builder setAccountLabels(
int index, com.google.shopping.css.v1.AccountLabel.Builder builderForValue) {
if (accountLabelsBuilder_ == null) {
ensureAccountLabelsIsMutable();
accountLabels_.set(index, builderForValue.build());
onChanged();
} else {
accountLabelsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public Builder addAccountLabels(com.google.shopping.css.v1.AccountLabel value) {
if (accountLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAccountLabelsIsMutable();
accountLabels_.add(value);
onChanged();
} else {
accountLabelsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public Builder addAccountLabels(int index, com.google.shopping.css.v1.AccountLabel value) {
if (accountLabelsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureAccountLabelsIsMutable();
accountLabels_.add(index, value);
onChanged();
} else {
accountLabelsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public Builder addAccountLabels(
com.google.shopping.css.v1.AccountLabel.Builder builderForValue) {
if (accountLabelsBuilder_ == null) {
ensureAccountLabelsIsMutable();
accountLabels_.add(builderForValue.build());
onChanged();
} else {
accountLabelsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public Builder addAccountLabels(
int index, com.google.shopping.css.v1.AccountLabel.Builder builderForValue) {
if (accountLabelsBuilder_ == null) {
ensureAccountLabelsIsMutable();
accountLabels_.add(index, builderForValue.build());
onChanged();
} else {
accountLabelsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public Builder addAllAccountLabels(
java.lang.Iterable<? extends com.google.shopping.css.v1.AccountLabel> values) {
if (accountLabelsBuilder_ == null) {
ensureAccountLabelsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, accountLabels_);
onChanged();
} else {
accountLabelsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public Builder clearAccountLabels() {
if (accountLabelsBuilder_ == null) {
accountLabels_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
accountLabelsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public Builder removeAccountLabels(int index) {
if (accountLabelsBuilder_ == null) {
ensureAccountLabelsIsMutable();
accountLabels_.remove(index);
onChanged();
} else {
accountLabelsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public com.google.shopping.css.v1.AccountLabel.Builder getAccountLabelsBuilder(int index) {
return getAccountLabelsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public com.google.shopping.css.v1.AccountLabelOrBuilder getAccountLabelsOrBuilder(int index) {
if (accountLabelsBuilder_ == null) {
return accountLabels_.get(index);
} else {
return accountLabelsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public java.util.List<? extends com.google.shopping.css.v1.AccountLabelOrBuilder>
getAccountLabelsOrBuilderList() {
if (accountLabelsBuilder_ != null) {
return accountLabelsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(accountLabels_);
}
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public com.google.shopping.css.v1.AccountLabel.Builder addAccountLabelsBuilder() {
return getAccountLabelsFieldBuilder()
.addBuilder(com.google.shopping.css.v1.AccountLabel.getDefaultInstance());
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public com.google.shopping.css.v1.AccountLabel.Builder addAccountLabelsBuilder(int index) {
return getAccountLabelsFieldBuilder()
.addBuilder(index, com.google.shopping.css.v1.AccountLabel.getDefaultInstance());
}
/**
*
*
* <pre>
* The labels from the specified account.
* </pre>
*
* <code>repeated .google.shopping.css.v1.AccountLabel account_labels = 1;</code>
*/
public java.util.List<com.google.shopping.css.v1.AccountLabel.Builder>
getAccountLabelsBuilderList() {
return getAccountLabelsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.css.v1.AccountLabel,
com.google.shopping.css.v1.AccountLabel.Builder,
com.google.shopping.css.v1.AccountLabelOrBuilder>
getAccountLabelsFieldBuilder() {
if (accountLabelsBuilder_ == null) {
accountLabelsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.css.v1.AccountLabel,
com.google.shopping.css.v1.AccountLabel.Builder,
com.google.shopping.css.v1.AccountLabelOrBuilder>(
accountLabels_,
((bitField0_ & 0x00000001) != 0),
getParentForChildren(),
isClean());
accountLabels_ = null;
}
return accountLabelsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.css.v1.ListAccountLabelsResponse)
}
// @@protoc_insertion_point(class_scope:google.shopping.css.v1.ListAccountLabelsResponse)
private static final com.google.shopping.css.v1.ListAccountLabelsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.shopping.css.v1.ListAccountLabelsResponse();
}
public static com.google.shopping.css.v1.ListAccountLabelsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListAccountLabelsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListAccountLabelsResponse>() {
@java.lang.Override
public ListAccountLabelsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListAccountLabelsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListAccountLabelsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.shopping.css.v1.ListAccountLabelsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
google/j2objc | 37,037 | jre_emul/android/platform/libcore/luni/src/main/java/javax/xml/datatype/Duration.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
//$Id: Duration.java 759828 2009-03-30 01:26:29Z mrglavas $
package javax.xml.datatype;
import java.math.BigDecimal;
import java.math.BigInteger;
import java.util.Calendar;
import java.util.Date;
import java.util.GregorianCalendar;
import javax.xml.namespace.QName;
/**
* <p>Immutable representation of a time span as defined in
* the W3C XML Schema 1.0 specification.</p>
*
* <p>A Duration object represents a period of Gregorian time,
* which consists of six fields (years, months, days, hours,
* minutes, and seconds) plus a sign (+/-) field.</p>
*
* <p>The first five fields have non-negative (>=0) integers or null
* (which represents that the field is not set),
* and the seconds field has a non-negative decimal or null.
* A negative sign indicates a negative duration.</p>
*
* <p>This class provides a number of methods that make it easy
* to use for the duration datatype of XML Schema 1.0 with
* the errata.</p>
*
* <h2>Order relationship</h2>
* <p>Duration objects only have partial order, where two values A and B
 * may be either:</p>
* <ol>
* <li>A<B (A is shorter than B)
* <li>A>B (A is longer than B)
* <li>A==B (A and B are of the same duration)
* <li>A<>B (Comparison between A and B is indeterminate)
* </ol>
*
* <p>For example, 30 days cannot be meaningfully compared to one month.
* The {@link #compare(Duration duration)} method implements this
* relationship.</p>
*
* <p>See the {@link #isLongerThan(Duration)} method for details about
* the order relationship among <code>Duration</code> objects.</p>
*
* <h2>Operations over Duration</h2>
* <p>This class provides a set of basic arithmetic operations, such
* as addition, subtraction and multiplication.
* Because durations don't have total order, an operation could
* fail for some combinations of operations. For example, you cannot
* subtract 15 days from 1 month. See the javadoc of those methods
* for detailed conditions where this could happen.</p>
*
* <p>Also, division of a duration by a number is not provided because
* the <code>Duration</code> class can only deal with finite precision
* decimal numbers. For example, one cannot represent 1 sec divided by 3.</p>
*
* <p>However, you could substitute a division by 3 with multiplying
* by numbers such as 0.3 or 0.333.</p>
*
* <h2>Range of allowed values</h2>
* <p>
* Because some operations of <code>Duration</code> rely on {@link Calendar}
* even though {@link Duration} can hold very large or very small values,
* some of the methods may not work correctly on such <code>Duration</code>s.
* The impacted methods document their dependency on {@link Calendar}.
*
*
* @author <a href="mailto:Joseph.Fialli@Sun.COM">Joseph Fialli</a>
* @author <a href="mailto:Kohsuke.Kawaguchi@Sun.com">Kohsuke Kawaguchi</a>
* @author <a href="mailto:Jeff.Suttor@Sun.com">Jeff Suttor</a>
* @version $Revision: 759828 $, $Date: 2009-03-29 18:26:29 -0700 (Sun, 29 Mar 2009) $
* @see XMLGregorianCalendar#add(Duration)
* @since 1.5
*/
public abstract class Duration {
/**
* <p>Return the name of the XML Schema date/time type that this instance
* maps to. Type is computed based on fields that are set,
* i.e. {@link #isSet(DatatypeConstants.Field field)} == <code>true</code>.</p>
*
* <table border="2" rules="all" cellpadding="2">
* <thead>
* <tr>
* <th align="center" colspan="7">
* Required fields for XML Schema 1.0 Date/Time Datatypes.<br/>
* <i>(timezone is optional for all date/time datatypes)</i>
* </th>
* </tr>
* </thead>
* <tbody>
* <tr>
* <td>Datatype</td>
* <td>year</td>
* <td>month</td>
* <td>day</td>
* <td>hour</td>
* <td>minute</td>
* <td>second</td>
* </tr>
* <tr>
* <td>{@link DatatypeConstants#DURATION}</td>
* <td>X</td>
* <td>X</td>
* <td>X</td>
* <td>X</td>
* <td>X</td>
* <td>X</td>
* </tr>
* <tr>
* <td>{@link DatatypeConstants#DURATION_DAYTIME}</td>
* <td></td>
* <td></td>
* <td>X</td>
* <td>X</td>
* <td>X</td>
* <td>X</td>
* </tr>
* <tr>
* <td>{@link DatatypeConstants#DURATION_YEARMONTH}</td>
* <td>X</td>
* <td>X</td>
* <td></td>
* <td></td>
* <td></td>
* <td></td>
* </tr>
* </tbody>
* </table>
*
* @return one of the following constants:
* {@link DatatypeConstants#DURATION},
* {@link DatatypeConstants#DURATION_DAYTIME} or
* {@link DatatypeConstants#DURATION_YEARMONTH}.
*
* @throws IllegalStateException If the combination of set fields does not match one of the XML Schema date/time datatypes.
*/
public QName getXMLSchemaType() {
boolean yearSet = isSet(DatatypeConstants.YEARS);
boolean monthSet = isSet(DatatypeConstants.MONTHS);
boolean daySet = isSet(DatatypeConstants.DAYS);
boolean hourSet = isSet(DatatypeConstants.HOURS);
boolean minuteSet = isSet(DatatypeConstants.MINUTES);
boolean secondSet = isSet(DatatypeConstants.SECONDS);
// DURATION
if (yearSet
&& monthSet
&& daySet
&& hourSet
&& minuteSet
&& secondSet) {
return DatatypeConstants.DURATION;
}
// DURATION_DAYTIME
if (!yearSet
&& !monthSet
&& daySet
&& hourSet
&& minuteSet
&& secondSet) {
return DatatypeConstants.DURATION_DAYTIME;
}
// DURATION_YEARMONTH
if (yearSet
&& monthSet
&& !daySet
&& !hourSet
&& !minuteSet
&& !secondSet) {
return DatatypeConstants.DURATION_YEARMONTH;
}
// nothing matches
throw new IllegalStateException(
"javax.xml.datatype.Duration#getXMLSchemaType():"
+ " this Duration does not match one of the XML Schema date/time datatypes:"
+ " year set = " + yearSet
+ " month set = " + monthSet
+ " day set = " + daySet
+ " hour set = " + hourSet
+ " minute set = " + minuteSet
+ " second set = " + secondSet
);
}
/**
* Returns the sign of this duration in -1,0, or 1.
*
* @return
* -1 if this duration is negative, 0 if the duration is zero,
* and 1 if the duration is positive.
*/
public abstract int getSign();
/**
* <p>Get the years value of this <code>Duration</code> as an <code>int</code> or <code>0</code> if not present.</p>
*
* <p><code>getYears()</code> is a convenience method for
* {@link #getField(DatatypeConstants.Field field) getField(DatatypeConstants.YEARS)}.</p>
*
* <p>As the return value is an <code>int</code>, an incorrect value will be returned for <code>Duration</code>s
* with years that go beyond the range of an <code>int</code>.
* Use {@link #getField(DatatypeConstants.Field field) getField(DatatypeConstants.YEARS)} to avoid possible loss of precision.</p>
*
* @return If the years field is present, return its value as an <code>int</code>, else return <code>0</code>.
*/
public int getYears() {
return getFieldValueAsInt(DatatypeConstants.YEARS);
}
/**
* Obtains the value of the MONTHS field as an integer value,
* or 0 if not present.
*
* This method works just like {@link #getYears()} except
* that this method works on the MONTHS field.
*
* @return Months of this <code>Duration</code>.
*/
public int getMonths() {
return getFieldValueAsInt(DatatypeConstants.MONTHS);
}
/**
* Obtains the value of the DAYS field as an integer value,
* or 0 if not present.
*
* This method works just like {@link #getYears()} except
* that this method works on the DAYS field.
*
* @return Days of this <code>Duration</code>.
*/
public int getDays() {
return getFieldValueAsInt(DatatypeConstants.DAYS);
}
/**
* Obtains the value of the HOURS field as an integer value,
* or 0 if not present.
*
* This method works just like {@link #getYears()} except
* that this method works on the HOURS field.
*
* @return Hours of this <code>Duration</code>.
*
*/
public int getHours() {
return getFieldValueAsInt(DatatypeConstants.HOURS);
}
/**
* Obtains the value of the MINUTES field as an integer value,
* or 0 if not present.
*
* This method works just like {@link #getYears()} except
* that this method works on the MINUTES field.
*
* @return Minutes of this <code>Duration</code>.
*
*/
public int getMinutes() {
return getFieldValueAsInt(DatatypeConstants.MINUTES);
}
/**
* Obtains the value of the SECONDS field as an integer value,
* or 0 if not present.
*
* This method works just like {@link #getYears()} except
* that this method works on the SECONDS field.
*
* @return seconds in the integer value. The fraction of seconds
* will be discarded (for example, if the actual value is 2.5,
* this method returns 2)
*/
public int getSeconds() {
return getFieldValueAsInt(DatatypeConstants.SECONDS);
}
/**
* <p>Returns the length of the duration in milliseconds.</p>
*
* <p>If the seconds field carries more digits than millisecond order,
* those will be simply discarded (or in other words, rounded to zero.)
* For example, for any Calendar value <code>x</code>,</p>
* <pre>
 * <code>new Duration("PT10.00099S").getTimeInMillis(x) == 10000</code>.
 * <code>new Duration("-PT10.00099S").getTimeInMillis(x) == -10000</code>.
* </pre>
*
* <p>
* Note that this method uses the {@link #addTo(Calendar)} method,
* which may work incorrectly with <code>Duration</code> objects with
* very large values in its fields. See the {@link #addTo(Calendar)}
* method for details.
*
* @param startInstant
* The length of a month/year varies. The <code>startInstant</code> is
* used to disambiguate this variance. Specifically, this method
* returns the difference between <code>startInstant</code> and
* <code>startInstant+duration</code>
*
* @return milliseconds between <code>startInstant</code> and
* <code>startInstant</code> plus this <code>Duration</code>
*
* @throws NullPointerException if <code>startInstant</code> parameter
* is null.
*
*/
public long getTimeInMillis(final Calendar startInstant) {
Calendar cal = (Calendar) startInstant.clone();
addTo(cal);
return getCalendarTimeInMillis(cal)
- getCalendarTimeInMillis(startInstant);
}
/**
* <p>Returns the length of the duration in milliseconds.</p>
*
* <p>If the seconds field carries more digits than millisecond order,
* those will be simply discarded (or in other words, rounded to zero.)
* For example, for any <code>Date</code> value <code>x</code>,</p>
* <pre>
* <code>new Duration("PT10.00099S").getTimeInMills(x) == 10000</code>.
* <code>new Duration("-PT10.00099S").getTimeInMills(x) == -10000</code>.
* </pre>
*
* <p>
* Note that this method uses the {@link #addTo(Date)} method,
* which may work incorrectly with <code>Duration</code> objects with
* very large values in its fields. See the {@link #addTo(Date)}
* method for details.
*
* @param startInstant
* The length of a month/year varies. The <code>startInstant</code> is
* used to disambiguate this variance. Specifically, this method
* returns the difference between <code>startInstant</code> and
* <code>startInstant+duration</code>.
*
* @throws NullPointerException
* If the startInstant parameter is null.
*
* @return milliseconds between <code>startInstant</code> and
* <code>startInstant</code> plus this <code>Duration</code>
*
* @see #getTimeInMillis(Calendar)
*/
public long getTimeInMillis(final Date startInstant) {
Calendar cal = new GregorianCalendar();
cal.setTime(startInstant);
this.addTo(cal);
return getCalendarTimeInMillis(cal) - startInstant.getTime();
}
    /**
     * Gets the value of a field.
     *
     * Fields of a duration object may contain arbitrarily large values.
     * Therefore this method is designed to return a {@link Number} object.
     *
     * In case of YEARS, MONTHS, DAYS, HOURS, and MINUTES, the returned
     * number will be a non-negative integer. In case of seconds,
     * the returned number may be a non-negative decimal value.
     *
     * @param field
     *      one of the six Field constants (YEARS, MONTHS, DAYS, HOURS,
     *      MINUTES, or SECONDS.)
     * @return
     *      If the specified field is present, this method returns
     *      a non-null non-negative {@link Number} object that
     *      represents its value. If it is not present, return null.
     *      For YEARS, MONTHS, DAYS, HOURS, and MINUTES, this method
     *      returns a {@link java.math.BigInteger} object. For SECONDS, this
     *      method returns a {@link java.math.BigDecimal}.
     *
     * @throws NullPointerException If the <code>field</code> is <code>null</code>.
     */
    public abstract Number getField(final DatatypeConstants.Field field);
/**
* Gets the value of a field as an <code>int</code>.
*
* @param field
* one of the six Field constants (YEARS,MONTHS,DAYS,HOURS,
* MINUTES, or SECONDS.)
* @return
* If the field is present, return its value as an <code>int</code>,
* else return <code>0</code>.
*/
private int getFieldValueAsInt(final DatatypeConstants.Field field) {
Number n = getField(field);
if (n != null) {
return n.intValue();
}
return 0;
}
    /**
     * Checks if a field is set.
     *
     * A field of a duration object may or may not be present.
     * This method can be used to test if a field is present.
     *
     * @param field
     *      one of the six Field constants (YEARS, MONTHS, DAYS, HOURS,
     *      MINUTES, or SECONDS.)
     * @return
     *      <code>true</code> if the field is present, <code>false</code>
     *      otherwise.
     *
     * @throws NullPointerException
     *      If the field parameter is null.
     */
    public abstract boolean isSet(final DatatypeConstants.Field field);
    /**
     * <p>Computes a new duration whose value is <code>this+rhs</code>.</p>
     *
     * <p>For example,</p>
     * <pre>
     * "1 day" + "-3 days" = "-2 days"
     * "1 year" + "1 day" = "1 year and 1 day"
     * "-(1 hour,50 minutes)" + "-20 minutes" = "-(1 hours,70 minutes)"
     * "15 hours" + "-3 days" = "-(2 days,9 hours)"
     * "1 year" + "-1 day" = IllegalStateException
     * </pre>
     *
     * <p>Since there's no way to meaningfully subtract 1 day from 1 month,
     * there are cases where the operation fails in
     * {@link IllegalStateException}.</p>
     *
     * <p>
     * Formally, the computation is defined as follows.</p>
     * <p>
     * Firstly, we can assume that two <code>Duration</code>s to be added
     * are both positive without losing generality (i.e.,
     * <code>(-X)+Y=Y-X</code>, <code>X+(-Y)=X-Y</code>,
     * <code>(-X)+(-Y)=-(X+Y)</code>)
     *
     * <p>
     * Addition of two positive <code>Duration</code>s is simply defined as
     * field by field addition where missing fields are treated as 0.
     * <p>
     * A field of the resulting <code>Duration</code> will be unset if and
     * only if respective fields of two input <code>Duration</code>s are unset.
     * <p>
     * Note that <code>lhs.add(rhs)</code> will be always successful if
     * <code>lhs.signum()*rhs.signum()!=-1</code> or both of them are
     * normalized.</p>
     *
     * @param rhs <code>Duration</code> to add to this <code>Duration</code>
     *
     * @return
     *      non-null valid Duration object.
     *
     * @throws NullPointerException
     *      If the rhs parameter is null.
     * @throws IllegalStateException
     *      If two durations cannot be meaningfully added. For
     *      example, adding negative one day to one month causes
     *      this exception.
     *
     *
     * @see #subtract(Duration)
     */
    public abstract Duration add(final Duration rhs);
    /**
     * Adds this duration to a {@link Calendar} object.
     *
     * <p>
     * Calls {@link java.util.Calendar#add(int,int)} in the
     * order of YEARS, MONTHS, DAYS, HOURS, MINUTES, SECONDS, and MILLISECONDS
     * if those fields are present. Because the {@link Calendar} class
     * uses int to hold values, there are cases where this method
     * won't work correctly (for example if values of fields
     * exceed the range of int.)
     * </p>
     *
     * <p>
     * Also, since this duration class is a Gregorian duration, this
     * method will not work correctly if the given {@link Calendar}
     * object is based on some other calendar systems.
     * </p>
     *
     * <p>
     * Any fractional parts of this <code>Duration</code> object
     * beyond milliseconds will be simply ignored. For example, if
     * this duration is "P1.23456S", then 1 is added to SECONDS,
     * 234 is added to MILLISECONDS, and the rest will be unused.
     * </p>
     *
     * <p>
     * Note that because {@link Calendar#add(int, int)} is using
     * <code>int</code>, <code>Duration</code> with values beyond the
     * range of <code>int</code> in its fields
     * will cause overflow/underflow to the given {@link Calendar}.
     * {@link XMLGregorianCalendar#add(Duration)} provides the same
     * basic operation as this method while avoiding
     * the overflow/underflow issues.
     * </p>
     *
     * @param calendar
     *      A calendar object whose value will be modified.
     * @throws NullPointerException
     *      if the calendar parameter is null.
     */
    public abstract void addTo(Calendar calendar);
/**
* Adds this duration to a {@link Date} object.
*
* <p>
* The given date is first converted into
* a {@link java.util.GregorianCalendar}, then the duration
* is added exactly like the {@link #addTo(Calendar)} method.
*
* <p>
* The updated time instant is then converted back into a
* {@link Date} object and used to update the given {@link Date} object.
*
* <p>
* This somewhat redundant computation is necessary to unambiguously
* determine the duration of months and years.
*
* @param date
* A date object whose value will be modified.
* @throws NullPointerException
* if the date parameter is null.
*/
public void addTo(Date date) {
// check data parameter
if (date == null) {
throw new NullPointerException("date == null");
}
Calendar cal = new GregorianCalendar();
cal.setTime(date);
this.addTo(cal);
date.setTime(getCalendarTimeInMillis(cal));
}
/**
* <p>Computes a new duration whose value is <code>this-rhs</code>.</p>
*
* <p>For example:</p>
* <pre>
* "1 day" - "-3 days" = "4 days"
* "1 year" - "1 day" = IllegalStateException
* "-(1 hour,50 minutes)" - "-20 minutes" = "-(1hours,30 minutes)"
* "15 hours" - "-3 days" = "3 days and 15 hours"
* "1 year" - "-1 day" = "1 year and 1 day"
* </pre>
*
* <p>Since there's no way to meaningfully subtract 1 day from 1 month,
* there are cases where the operation fails in {@link IllegalStateException}.</p>
*
* <p>Formally the computation is defined as follows.
* First, we can assume that two <code>Duration</code>s are both positive
* without losing generality. (i.e.,
* <code>(-X)-Y=-(X+Y)</code>, <code>X-(-Y)=X+Y</code>,
* <code>(-X)-(-Y)=-(X-Y)</code>)</p>
*
* <p>Then two durations are subtracted field by field.
* If the sign of any non-zero field <tt>F</tt> is different from
* the sign of the most significant field,
* 1 (if <tt>F</tt> is negative) or -1 (otherwise)
* will be borrowed from the next bigger unit of <tt>F</tt>.</p>
*
* <p>This process is repeated until all the non-zero fields have
* the same sign.</p>
*
* <p>If a borrow occurs in the days field (in other words, if
* the computation needs to borrow 1 or -1 month to compensate
* days), then the computation fails by throwing an
* {@link IllegalStateException}.</p>
*
* @param rhs <code>Duration</code> to subtract from this <code>Duration</code>.
*
* @return New <code>Duration</code> created from subtracting <code>rhs</code> from this <code>Duration</code>.
*
* @throws IllegalStateException
* If two durations cannot be meaningfully subtracted. For
* example, subtracting one day from one month causes
* this exception.
*
* @throws NullPointerException
* If the rhs parameter is null.
*
* @see #add(Duration)
*/
public Duration subtract(final Duration rhs) {
return add(rhs.negate());
}
/**
* <p>Computes a new duration whose value is <code>factor</code> times
* longer than the value of this duration.</p>
*
* <p>This method is provided for the convenience.
* It is functionally equivalent to the following code:</p>
* <pre>
* multiply(new BigDecimal(String.valueOf(factor)))
* </pre>
*
* @param factor Factor times longer of new <code>Duration</code> to create.
*
* @return New <code>Duration</code> that is <code>factor</code>times longer than this <code>Duration</code>.
*
* @see #multiply(BigDecimal)
*/
public Duration multiply(int factor) {
return multiply(BigDecimal.valueOf(factor));
}
    /**
     * Computes a new duration whose value is <code>factor</code> times
     * longer than the value of this duration.
     *
     * <p>
     * For example,
     * <pre>
     * "P1M" (1 month) * "12" = "P12M" (12 months)
     * "PT1M" (1 min) * "0.3" = "PT18S" (18 seconds)
     * "P1M" (1 month) * "1.5" = IllegalStateException
     * </pre>
     *
     * <p>
     * Since the <code>Duration</code> class is immutable, this method
     * doesn't change the value of this object. It simply computes
     * a new Duration object and returns it.
     *
     * <p>
     * The operation will be performed field by field with the precision
     * of {@link BigDecimal}. Since all the fields except seconds are
     * restricted to hold integers,
     * any fraction produced by the computation will be
     * carried down toward the next lower unit. For example,
     * if you multiply "P1D" (1 day) with "0.5", then it will be 0.5 day,
     * which will be carried down to "PT12H" (12 hours).
     * When fractions of month cannot be meaningfully carried down
     * to days, or year to months, this will cause an
     * {@link IllegalStateException} to be thrown.
     * For example if you multiply one month by 0.5.</p>
     *
     * <p>
     * To avoid {@link IllegalStateException}, use
     * the {@link #normalizeWith(Calendar)} method to remove the years
     * and months fields.
     *
     * @param factor to multiply by
     *
     * @return
     *      returns a non-null valid <code>Duration</code> object
     *
     * @throws IllegalStateException if operation produces fraction in
     *      the months field.
     *
     * @throws NullPointerException if the <code>factor</code> parameter is
     *      <code>null</code>.
     *
     */
    public abstract Duration multiply(final BigDecimal factor);
    /**
     * Returns a new <code>Duration</code> object whose
     * value is <code>-this</code>.
     *
     * <p>
     * Since the <code>Duration</code> class is immutable, this method
     * doesn't change the value of this object. It simply computes
     * a new Duration object and returns it.
     *
     * @return
     *      always returns a non-null valid <code>Duration</code> object.
     */
    public abstract Duration negate();
    /**
     * <p>Converts the years and months fields into the days field
     * by using a specific time instant as the reference point.</p>
     *
     * <p>For example, duration of one month normalizes to 31 days
     * given the start time instance "July 8th 2003, 17:40:32".</p>
     *
     * <p>Formally, the computation is done as follows:</p>
     * <ol>
     *  <li>the given Calendar object is cloned</li>
     *  <li>the years, months and days fields will be added to the {@link Calendar} object
     *      by using the {@link Calendar#add(int,int)} method</li>
     *  <li>the difference between the two Calendars is computed in milliseconds and converted to days,
     *     if a remainder occurs due to Daylight Savings Time, it is discarded</li>
     *  <li>the computed days, along with the hours, minutes and seconds
     *      fields of this duration object is used to construct a new
     *      Duration object.</li>
     * </ol>
     *
     * <p>Note that since the Calendar class uses <code>int</code> to
     * hold the value of year and month, this method may produce
     * an unexpected result if this duration object holds
     * a very large value in the years or months fields.</p>
     *
     * @param startTimeInstant <code>Calendar</code> reference point.
     *
     * @return <code>Duration</code> of years and months of this <code>Duration</code> as days.
     *
     * @throws NullPointerException If the startTimeInstant parameter is null.
     */
    public abstract Duration normalizeWith(final Calendar startTimeInstant);
    /**
     * <p>Partial order relation comparison with this <code>Duration</code> instance.</p>
     *
     * <p>Comparison result must be in accordance with
     * <a href="http://www.w3.org/TR/xmlschema-2/#duration-order">W3C XML Schema 1.0 Part 2, Section 3.2.7.6.2,
     * <i>Order relation on duration</i></a>.</p>
     *
     * <p>Return:</p>
     * <ul>
     *   <li>{@link DatatypeConstants#LESSER} if this <code>Duration</code> is shorter than <code>duration</code> parameter</li>
     *   <li>{@link DatatypeConstants#EQUAL} if this <code>Duration</code> is equal to <code>duration</code> parameter</li>
     *   <li>{@link DatatypeConstants#GREATER} if this <code>Duration</code> is longer than <code>duration</code> parameter</li>
     *   <li>{@link DatatypeConstants#INDETERMINATE} if a conclusive partial order relation cannot be determined</li>
     * </ul>
     *
     * @param duration to compare
     *
     * @return the relationship between <code>this</code> <code>Duration</code> and <code>duration</code> parameter as
     *   {@link DatatypeConstants#LESSER}, {@link DatatypeConstants#EQUAL}, {@link DatatypeConstants#GREATER}
     *   or {@link DatatypeConstants#INDETERMINATE}.
     *
     * @throws UnsupportedOperationException If the underlying implementation
     *   cannot reasonably process the request, e.g. W3C XML Schema allows for
     *   arbitrarily large/small/precise values, the request may be beyond the
     *   implementations capability.
     * @throws NullPointerException if <code>duration</code> is <code>null</code>.
     *
     * @see #isShorterThan(Duration)
     * @see #isLongerThan(Duration)
     */
    public abstract int compare(final Duration duration);
/**
* <p>Checks if this duration object is strictly longer than
* another <code>Duration</code> object.</p>
*
* <p>Duration X is "longer" than Y if and only if X>Y
* as defined in the section 3.2.6.2 of the XML Schema 1.0
* specification.</p>
*
* <p>For example, "P1D" (one day) > "PT12H" (12 hours) and
* "P2Y" (two years) > "P23M" (23 months).</p>
*
* @param duration <code>Duration</code> to test this <code>Duration</code> against.
*
* @throws UnsupportedOperationException If the underlying implementation
* cannot reasonably process the request, e.g. W3C XML Schema allows for
* arbitrarily large/small/precise values, the request may be beyond the
* implementations capability.
* @throws NullPointerException If <code>duration</code> is null.
*
* @return
* true if the duration represented by this object
* is longer than the given duration. false otherwise.
*
* @see #isShorterThan(Duration)
* @see #compare(Duration duration)
*/
public boolean isLongerThan(final Duration duration) {
return compare(duration) == DatatypeConstants.GREATER;
}
/**
* <p>Checks if this duration object is strictly shorter than
* another <code>Duration</code> object.</p>
*
* @param duration <code>Duration</code> to test this <code>Duration</code> against.
*
* @return <code>true</code> if <code>duration</code> parameter is shorter than this <code>Duration</code>,
* else <code>false</code>.
*
* @throws UnsupportedOperationException If the underlying implementation
* cannot reasonably process the request, e.g. W3C XML Schema allows for
* arbitrarily large/small/precise values, the request may be beyond the
* implementations capability.
* @throws NullPointerException if <code>duration</code> is null.
*
* @see #isLongerThan(Duration duration)
* @see #compare(Duration duration)
*/
public boolean isShorterThan(final Duration duration) {
return compare(duration) == DatatypeConstants.LESSER;
}
/**
* <p>Checks if this duration object has the same duration
* as another <code>Duration</code> object.</p>
*
* <p>For example, "P1D" (1 day) is equal to "PT24H" (24 hours).</p>
*
* <p>Duration X is equal to Y if and only if time instant
* t+X and t+Y are the same for all the test time instants
* specified in the section 3.2.6.2 of the XML Schema 1.0
* specification.</p>
*
* <p>Note that there are cases where two <code>Duration</code>s are
* "incomparable" to each other, like one month and 30 days.
* For example,</p>
* <pre>
* !new Duration("P1M").isShorterThan(new Duration("P30D"))
* !new Duration("P1M").isLongerThan(new Duration("P30D"))
* !new Duration("P1M").equals(new Duration("P30D"))
* </pre>
*
* @param duration
* A non-null valid <code>Duration</code> object.
*
* @return
* <code>true</code> if this duration is the same length as
* <code>duration</code>.
* <code>false</code> if <code>duration</code> is not a
* <code>Duration</code> object, is <code>null</code>,
* or its length is different from this duration.
*
* @throws UnsupportedOperationException If the underlying implementation
* cannot reasonably process the request, e.g. W3C XML Schema allows for
* arbitrarily large/small/precise values, the request may be beyond the
* implementations capability.
*
* @see #compare(Duration duration)
*/
public boolean equals(final Object duration) {
if (duration == this) {
return true;
}
if (duration instanceof Duration) {
return compare((Duration) duration) == DatatypeConstants.EQUAL;
}
return false;
}
    /**
     * Returns a hash code consistent with the definition of the equals method.
     *
     * @return a hash code value for this <code>Duration</code>; equal
     *      durations (per {@link #equals(Object)}) must produce equal
     *      hash codes.
     *
     * @see Object#hashCode()
     */
    public abstract int hashCode();
/**
* <p>Returns a <code>String</code> representation of this <code>Duration</code> <code>Object</code>.</p>
*
* <p>The result is formatted according to the XML Schema 1.0 specification and can be always parsed back later into the
* equivalent <code>Duration</code> <code>Object</code> by {@link DatatypeFactory#newDuration(String lexicalRepresentation)}.</p>
*
* <p>Formally, the following holds for any <code>Duration</code>
* <code>Object</code> x:</p>
* <pre>
* new Duration(x.toString()).equals(x)
* </pre>
*
* @return A non-<code>null</code> valid <code>String</code> representation of this <code>Duration</code>.
*/
public String toString() {
StringBuilder buf = new StringBuilder();
if (getSign() < 0) {
buf.append('-');
}
buf.append('P');
BigInteger years = (BigInteger) getField(DatatypeConstants.YEARS);
if (years != null) {
buf.append(years).append('Y');
}
BigInteger months = (BigInteger) getField(DatatypeConstants.MONTHS);
if (months != null) {
buf.append(months).append('M');
}
BigInteger days = (BigInteger) getField(DatatypeConstants.DAYS);
if (days != null) {
buf.append(days).append('D');
}
BigInteger hours = (BigInteger) getField(DatatypeConstants.HOURS);
BigInteger minutes = (BigInteger) getField(DatatypeConstants.MINUTES);
BigDecimal seconds = (BigDecimal) getField(DatatypeConstants.SECONDS);
if (hours != null || minutes != null || seconds != null) {
buf.append('T');
if (hours != null) {
buf.append(hours).append('H');
}
if (minutes != null) {
buf.append(minutes).append('M');
}
if (seconds != null) {
buf.append(toString(seconds)).append('S');
}
}
return buf.toString();
}
/**
* <p>Turns {@link BigDecimal} to a string representation.</p>
*
* <p>Due to a behavior change in the {@link BigDecimal#toString()}
* method in JDK1.5, this had to be implemented here.</p>
*
* @param bd <code>BigDecimal</code> to format as a <code>String</code>
*
* @return <code>String</code> representation of <code>BigDecimal</code>
*/
private String toString(BigDecimal bd) {
String intString = bd.unscaledValue().toString();
int scale = bd.scale();
if (scale == 0) {
return intString;
}
/* Insert decimal point */
StringBuilder buf;
int insertionPoint = intString.length() - scale;
if (insertionPoint == 0) { /* Point goes right before intVal */
return "0." + intString;
}
else if (insertionPoint > 0) { /* Point goes inside intVal */
buf = new StringBuilder(intString);
buf.insert(insertionPoint, '.');
}
else { /* We must insert zeros between point and intVal */
buf = new StringBuilder(3 - insertionPoint + intString.length());
buf.append("0.");
for (int i = 0; i < -insertionPoint; i++) {
buf.append('0');
}
buf.append(intString);
}
return buf.toString();
}
/**
* <p>Calls the {@link Calendar#getTimeInMillis} method.
* Prior to JDK1.4, this method was protected and therefore
* cannot be invoked directly.</p>
*
* <p>TODO: In future, this should be replaced by <code>cal.getTimeInMillis()</code>.</p>
*
* @param cal <code>Calendar</code> to get time in milliseconds.
*
* @return Milliseconds of <code>cal</code>.
*/
private static long getCalendarTimeInMillis(final Calendar cal) {
return cal.getTime().getTime();
}
}
|
google/j2objc | 37,205 | jre_emul/android/platform/libcore/ojluni/src/main/java/java/util/TimSort.java | /*
* Copyright (c) 2009, 2013, Oracle and/or its affiliates. All rights reserved.
* Copyright 2009 Google Inc. All Rights Reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.util;
/**
* A stable, adaptive, iterative mergesort that requires far fewer than
* n lg(n) comparisons when running on partially sorted arrays, while
* offering performance comparable to a traditional mergesort when run
* on random arrays. Like all proper mergesorts, this sort is stable and
* runs O(n log n) time (worst case). In the worst case, this sort requires
* temporary storage space for n/2 object references; in the best case,
* it requires only a small constant amount of space.
*
* This implementation was adapted from Tim Peters's list sort for
* Python, which is described in detail here:
*
* http://svn.python.org/projects/python/trunk/Objects/listsort.txt
*
* Tim's C code may be found here:
*
* http://svn.python.org/projects/python/trunk/Objects/listobject.c
*
* The underlying techniques are described in this paper (and may have
* even earlier origins):
*
* "Optimistic Sorting and Information Theoretic Complexity"
* Peter McIlroy
* SODA (Fourth Annual ACM-SIAM Symposium on Discrete Algorithms),
* pp 467-474, Austin, Texas, 25-27 January 1993.
*
* While the API to this class consists solely of static methods, it is
* (privately) instantiable; a TimSort instance holds the state of an ongoing
* sort, assuming the input array is large enough to warrant the full-blown
* TimSort. Small arrays are sorted in place, using a binary insertion sort.
*
* @author Josh Bloch
*/
class TimSort<T> {
    /**
     * This is the minimum sized sequence that will be merged.  Shorter
     * sequences will be lengthened by calling binarySort.  If the entire
     * array is less than this length, no merges will be performed.
     *
     * This constant should be a power of two.  It was 64 in Tim Peters's C
     * implementation, but 32 was empirically determined to work better in
     * this implementation.  In the unlikely event that you set this constant
     * to be a number that's not a power of two, you'll need to change the
     * {@link #minRunLength} computation.
     *
     * If you decrease this constant, you must change the stackLen
     * computation in the TimSort constructor, or you risk an
     * ArrayIndexOutOfBoundsException.  See listsort.txt for a discussion
     * of the minimum stack length required as a function of the length
     * of the array being sorted and the minimum merge sequence length.
     */
    private static final int MIN_MERGE = 32;
    /**
     * The array being sorted.
     */
    private final T[] a;
    /**
     * The comparator for this sort.
     */
    private final Comparator<? super T> c;
    /**
     * When we get into galloping mode, we stay there until both runs win less
     * often than MIN_GALLOP consecutive times.
     */
    private static final int MIN_GALLOP = 7;
    /**
     * This controls when we get *into* galloping mode.  It is initialized
     * to MIN_GALLOP.  The mergeLo and mergeHi methods nudge it higher for
     * random data, and lower for highly structured data.
     */
    private int minGallop = MIN_GALLOP;
    /**
     * Maximum initial size of tmp array, which is used for merging.  The array
     * can grow to accommodate demand.
     *
     * Unlike Tim's original C version, we do not allocate this much storage
     * when sorting smaller arrays.  This change was required for performance.
     */
    private static final int INITIAL_TMP_STORAGE_LENGTH = 256;
    /**
     * Temp storage for merges. A workspace array may optionally be
     * provided in constructor, and if so will be used as long as it
     * is big enough.
     */
    private T[] tmp;
    private int tmpBase; // base of tmp array slice
    private int tmpLen;  // length of tmp array slice
    /**
     * A stack of pending runs yet to be merged.  Run i starts at
     * address base[i] and extends for len[i] elements.  It's always
     * true (so long as the indices are in bounds) that:
     *
     *     runBase[i] + runLen[i] == runBase[i + 1]
     *
     * so we could cut the storage for this, but it's a minor amount,
     * and keeping all the info explicit simplifies the code.
     */
    private int stackSize = 0;  // Number of pending runs on stack
    private final int[] runBase;
    private final int[] runLen;
/**
* Creates a TimSort instance to maintain the state of an ongoing sort.
*
* @param a the array to be sorted
* @param c the comparator to determine the order of the sort
* @param work a workspace array (slice)
* @param workBase origin of usable space in work array
* @param workLen usable size of work array
*/
private TimSort(T[] a, Comparator<? super T> c, T[] work, int workBase, int workLen) {
this.a = a;
this.c = c;
// Allocate temp storage (which may be increased later if necessary)
int len = a.length;
int tlen = (len < 2 * INITIAL_TMP_STORAGE_LENGTH) ?
len >>> 1 : INITIAL_TMP_STORAGE_LENGTH;
if (work == null || workLen < tlen || workBase + tlen > work.length) {
@SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"})
T[] newArray = (T[])java.lang.reflect.Array.newInstance
(a.getClass().getComponentType(), tlen);
tmp = newArray;
tmpBase = 0;
tmpLen = tlen;
}
else {
tmp = work;
tmpBase = workBase;
tmpLen = workLen;
}
/*
* Allocate runs-to-be-merged stack (which cannot be expanded). The
* stack length requirements are described in listsort.txt. The C
* version always uses the same stack length (85), but this was
* measured to be too expensive when sorting "mid-sized" arrays (e.g.,
* 100 elements) in Java. Therefore, we use smaller (but sufficiently
* large) stack lengths for smaller arrays. The "magic numbers" in the
* computation below must be changed if MIN_MERGE is decreased. See
* the MIN_MERGE declaration above for more information.
* The maximum value of 49 allows for an array up to length
* Integer.MAX_VALUE-4, if array is filled by the worst case stack size
* increasing scenario. More explanations are given in section 4 of:
* http://envisage-project.eu/wp-content/uploads/2015/02/sorting.pdf
*/
int stackLen = (len < 120 ? 5 :
len < 1542 ? 10 :
len < 119151 ? 24 : 49);
runBase = new int[stackLen];
runLen = new int[stackLen];
}
/*
* The next method (package private and static) constitutes the
* entire API of this class.
*/
/**
* Sorts the given range, using the given workspace array slice
* for temp storage when possible. This method is designed to be
* invoked from public methods (in class Arrays) after performing
* any necessary array bounds checks and expanding parameters into
* the required forms.
*
* @param a the array to be sorted
* @param lo the index of the first element, inclusive, to be sorted
* @param hi the index of the last element, exclusive, to be sorted
* @param c the comparator to use
* @param work a workspace array (slice)
* @param workBase origin of usable space in work array
* @param workLen usable size of work array
* @since 1.8
*/
static <T> void sort(T[] a, int lo, int hi, Comparator<? super T> c,
T[] work, int workBase, int workLen) {
assert c != null && a != null && lo >= 0 && lo <= hi && hi <= a.length;
int nRemaining = hi - lo;
if (nRemaining < 2)
return; // Arrays of size 0 and 1 are always sorted
// If array is small, do a "mini-TimSort" with no merges
if (nRemaining < MIN_MERGE) {
int initRunLen = countRunAndMakeAscending(a, lo, hi, c);
binarySort(a, lo, hi, lo + initRunLen, c);
return;
}
/**
* March over the array once, left to right, finding natural runs,
* extending short natural runs to minRun elements, and merging runs
* to maintain stack invariant.
*/
TimSort<T> ts = new TimSort<>(a, c, work, workBase, workLen);
int minRun = minRunLength(nRemaining);
do {
// Identify next run
int runLen = countRunAndMakeAscending(a, lo, hi, c);
// If run is short, extend to min(minRun, nRemaining)
if (runLen < minRun) {
int force = nRemaining <= minRun ? nRemaining : minRun;
binarySort(a, lo, lo + force, lo + runLen, c);
runLen = force;
}
// Push run onto pending-run stack, and maybe merge
ts.pushRun(lo, runLen);
ts.mergeCollapse();
// Advance to find next run
lo += runLen;
nRemaining -= runLen;
} while (nRemaining != 0);
// Merge all remaining runs to complete sort
assert lo == hi;
ts.mergeForceCollapse();
assert ts.stackSize == 1;
}
/**
 * Sorts {@code a[lo..hi)} with a binary insertion sort, assuming the
 * prefix {@code a[lo..start)} is already sorted. This is the method of
 * choice for small ranges: O(n log n) comparisons, but O(n^2) element
 * moves in the worst case.
 *
 * @param a the array containing the range to sort
 * @param lo index of the first element of the range
 * @param hi index one past the last element of the range
 * @param start index of the first element not known to be sorted
 *        ({@code lo <= start <= hi})
 * @param c the comparator defining the order
 */
private static <T> void binarySort(T[] a, int lo, int hi, int start,
                                   Comparator<? super T> c) {
    assert lo <= start && start <= hi;
    if (start == lo)
        start++;                 // a one-element prefix is trivially sorted
    while (start < hi) {
        T pivot = a[start];
        // Binary-search the sorted prefix a[lo..start) for the leftmost
        // insertion point of pivot. Invariants:
        //   pivot >= everything in [lo, left)
        //   pivot <  everything in [right, start)
        int left = lo;
        int right = start;
        assert left <= right;
        while (left < right) {
            int mid = (left + right) >>> 1;   // overflow-safe midpoint
            if (c.compare(pivot, a[mid]) < 0)
                right = mid;
            else
                left = mid + 1;
        }
        assert left == right;
        // pivot belongs at index left; elements equal to pivot end up to
        // its left, which is what makes this sort stable. Shift
        // a[left..start) one slot to the right to open a hole.
        int n = start - left;    // number of elements to shift
        if (n == 2) {
            a[left + 2] = a[left + 1];
            a[left + 1] = a[left];
        } else if (n == 1) {
            a[left + 1] = a[left];
        } else {
            System.arraycopy(a, left, a, left + 1, n);
        }
        a[left] = pivot;
        start++;
    }
}
/**
 * Measures the run starting at {@code a[lo]} and, if that run is
 * descending, reverses it in place so that every run handed back to the
 * caller is ascending.
 *
 * A run is either the longest non-decreasing prefix
 * (a[lo] <= a[lo+1] <= ...) or the longest strictly decreasing prefix
 * (a[lo] > a[lo+1] > ...). The strictness requirement for descending
 * runs means reversing them cannot swap equal elements, preserving the
 * stability of the enclosing mergesort.
 *
 * @param a the array containing the run
 * @param lo index of the first element of the run
 * @param hi index one past the last element that may belong to the run;
 *        {@code lo < hi} is required
 * @param c the comparator defining the order
 * @return the length of the (now ascending) run starting at {@code lo}
 */
private static <T> int countRunAndMakeAscending(T[] a, int lo, int hi,
                                                Comparator<? super T> c) {
    assert lo < hi;
    int next = lo + 1;
    if (next == hi)
        return 1;                // a single element is a run of length 1
    boolean descending = c.compare(a[next++], a[lo]) < 0;
    if (descending) {
        // Extend the strictly-descending run, then flip it in place.
        while (next < hi && c.compare(a[next], a[next - 1]) < 0)
            next++;
        reverseRange(a, lo, next);
    } else {
        // Extend the non-decreasing (ascending) run.
        while (next < hi && c.compare(a[next], a[next - 1]) >= 0)
            next++;
    }
    return next - lo;
}
/**
 * Reverses {@code a[lo..hi)} in place.
 *
 * @param a the array containing the range to reverse
 * @param lo index of the first element of the range
 * @param hi index one past the last element of the range
 */
private static void reverseRange(Object[] a, int lo, int hi) {
    // Walk the two ends toward each other, swapping as we go.
    for (int i = lo, j = hi - 1; i < j; i++, j--) {
        Object tmp = a[i];
        a[i] = a[j];
        a[j] = tmp;
    }
}
/**
 * Computes the minimum acceptable run length for an array of length
 * {@code n}; natural runs shorter than this are extended via
 * {@link #binarySort}.
 *
 * Informally: if n &lt; MIN_MERGE the whole array is one run; if n is an
 * exact power of two the result is MIN_MERGE/2; otherwise the result is
 * a k with MIN_MERGE/2 &lt;= k &lt;= MIN_MERGE such that n/k is close to,
 * but strictly below, a power of two. (Rationale in listsort.txt.)
 *
 * @param n length of the array to be sorted (must be non-negative)
 * @return the minimum run length to merge
 */
private static int minRunLength(int n) {
    assert n >= 0;
    int lowBitsSeen = 0;      // set to 1 once any 1 bit has been shifted off
    while (n >= MIN_MERGE) {
        lowBitsSeen |= (n & 1);
        n >>= 1;
    }
    // Round up when n was not an exact power-of-two multiple.
    return n + lowBitsSeen;
}
/**
 * Records a new run on the pending-run stack.
 *
 * @param base index of the first element of the run
 * @param len number of elements in the run
 */
private void pushRun(int base, int len) {
    // Parameters renamed so they no longer shadow the fields,
    // making the explicit "this." qualification unnecessary.
    runBase[stackSize] = base;
    runLen[stackSize] = len;
    stackSize++;
}
/**
 * Examines the stack of runs waiting to be merged and merges adjacent runs
 * until the stack invariants are reestablished:
 *
 * 1. runLen[i - 3] > runLen[i - 2] + runLen[i - 1]
 * 2. runLen[i - 2] > runLen[i - 1]
 *
 * This method is called each time a new run is pushed onto the stack,
 * so the invariants are guaranteed to hold for i < stackSize upon
 * entry to the method.
 *
 * Thanks to Stijn de Gouw, Jurriaan Rot, Frank S. de Boer,
 * Richard Bubel and Reiner Hähnle, this is fixed with respect to
 * the analysis in "On the Worst-Case Complexity of TimSort" by
 * Nicolas Auger, Vincent Jugé, Cyril Nicaud, and Carine Pivoteau.
 */
private void mergeCollapse() {
    while (stackSize > 1) {
        int n = stackSize - 2;
        // If invariant 1 is violated at either of the top two positions,
        // merge; picking the smaller of runLen[n-1] and runLen[n+1] as
        // the partner keeps the merge balanced. The exact shape of this
        // condition (checking n-1 AND n-2) is the corrected form — do not
        // "simplify" it back to the original, broken TimSort condition.
        if (n > 0 && runLen[n-1] <= runLen[n] + runLen[n+1] ||
            n > 1 && runLen[n-2] <= runLen[n] + runLen[n-1]) {
            if (runLen[n - 1] < runLen[n + 1])
                n--;
        } else if (n < 0 || runLen[n] > runLen[n + 1]) {
            // n < 0 guards the single-run-on-stack case; otherwise
            // invariant 2 holds and both invariants are established.
            break; // Invariant is established
        }
        mergeAt(n);
    }
}
/**
 * Collapses the pending-run stack down to a single run by merging,
 * regardless of the usual stack invariants. Invoked exactly once, at the
 * end of the sort.
 */
private void mergeForceCollapse() {
    while (stackSize > 1) {
        int i = stackSize - 2;
        // Merge toward the smaller neighbor to keep merges balanced.
        if (i > 0 && runLen[i - 1] < runLen[i + 1])
            i--;
        mergeAt(i);
    }
}
/**
 * Merges the two runs at stack indices i and i+1. Run i must be
 * the penultimate or antepenultimate run on the stack. In other words,
 * i must be equal to stackSize-2 or stackSize-3.
 *
 * @param i stack index of the first of the two runs to merge
 */
private void mergeAt(int i) {
    assert stackSize >= 2;
    assert i >= 0;
    assert i == stackSize - 2 || i == stackSize - 3;
    int base1 = runBase[i];
    int len1 = runLen[i];
    int base2 = runBase[i + 1];
    int len2 = runLen[i + 1];
    assert len1 > 0 && len2 > 0;
    assert base1 + len1 == base2;   // runs must be adjacent in the array
    /*
     * Record the length of the combined runs; if i is the 3rd-last
     * run now, also slide over the last run (which isn't involved
     * in this merge). The current run (i+1) goes away in any case.
     */
    runLen[i] = len1 + len2;
    if (i == stackSize - 3) {
        runBase[i + 1] = runBase[i + 2];
        runLen[i + 1] = runLen[i + 2];
    }
    stackSize--;
    /*
     * Find where the first element of run2 goes in run1. Prior elements
     * in run1 can be ignored (because they're already in place).
     */
    int k = gallopRight(a[base2], a, base1, len1, 0, c);
    assert k >= 0;
    base1 += k;
    len1 -= k;
    if (len1 == 0)
        return;   // all of run1 precedes run2; already sorted
    /*
     * Find where the last element of run1 goes in run2. Subsequent elements
     * in run2 can be ignored (because they're already in place).
     */
    len2 = gallopLeft(a[base1 + len1 - 1], a, base2, len2, len2 - 1, c);
    assert len2 >= 0;
    if (len2 == 0)
        return;   // all of run2 precedes the remainder of run1
    // Merge remaining runs, using tmp array with min(len1, len2) elements
    // (mergeLo copies the shorter left run; mergeHi the shorter right run).
    if (len1 <= len2)
        mergeLo(base1, len1, base2, len2);
    else
        mergeHi(base1, len1, base2, len2);
}
/**
 * Locates the position at which to insert the specified key into the
 * specified sorted range; if the range contains an element equal to key,
 * returns the index of the leftmost equal element.
 *
 * @param key the key whose insertion point to search for
 * @param a the array in which to search
 * @param base the index of the first element in the range
 * @param len the length of the range; must be > 0
 * @param hint the index at which to begin the search, 0 <= hint < n.
 * The closer hint is to the result, the faster this method will run.
 * @param c the comparator used to order the range, and to search
 * @return the int k, 0 <= k <= n such that a[b + k - 1] < key <= a[b + k],
 * pretending that a[b - 1] is minus infinity and a[b + n] is infinity.
 * In other words, key belongs at index b + k; or in other words,
 * the first k elements of a should precede key, and the last n - k
 * should follow it.
 */
private static <T> int gallopLeft(T key, T[] a, int base, int len, int hint,
                                  Comparator<? super T> c) {
    assert len > 0 && hint >= 0 && hint < len;
    int lastOfs = 0;
    int ofs = 1;
    // Phase 1: exponential ("galloping") search outward from hint to
    // bracket the insertion point between lastOfs (exclusive) and ofs.
    if (c.compare(key, a[base + hint]) > 0) {
        // Gallop right until a[base+hint+lastOfs] < key <= a[base+hint+ofs]
        int maxOfs = len - hint;
        while (ofs < maxOfs && c.compare(key, a[base + hint + ofs]) > 0) {
            lastOfs = ofs;
            ofs = (ofs << 1) + 1;   // 1, 3, 7, 15, ... (2^k - 1)
            if (ofs <= 0) // int overflow
                ofs = maxOfs;
        }
        if (ofs > maxOfs)
            ofs = maxOfs;
        // Make offsets relative to base
        lastOfs += hint;
        ofs += hint;
    } else { // key <= a[base + hint]
        // Gallop left until a[base+hint-ofs] < key <= a[base+hint-lastOfs]
        final int maxOfs = hint + 1;
        while (ofs < maxOfs && c.compare(key, a[base + hint - ofs]) <= 0) {
            lastOfs = ofs;
            ofs = (ofs << 1) + 1;
            if (ofs <= 0) // int overflow
                ofs = maxOfs;
        }
        if (ofs > maxOfs)
            ofs = maxOfs;
        // Make offsets relative to base (note the swap: we galloped left,
        // so the bracketing interval must be mirrored around hint).
        int tmp = lastOfs;
        lastOfs = hint - ofs;
        ofs = hint - tmp;
    }
    assert -1 <= lastOfs && lastOfs < ofs && ofs <= len;
    /*
     * Phase 2: now a[base+lastOfs] < key <= a[base+ofs], so key belongs
     * somewhere to the right of lastOfs but no farther right than ofs.
     * Do a binary search, with invariant
     * a[base + lastOfs - 1] < key <= a[base + ofs].
     */
    lastOfs++;
    while (lastOfs < ofs) {
        int m = lastOfs + ((ofs - lastOfs) >>> 1);   // overflow-safe midpoint
        if (c.compare(key, a[base + m]) > 0)
            lastOfs = m + 1; // a[base + m] < key
        else
            ofs = m; // key <= a[base + m]
    }
    assert lastOfs == ofs; // so a[base + ofs - 1] < key <= a[base + ofs]
    return ofs;
}
/**
 * Like gallopLeft, except that if the range contains an element equal to
 * key, gallopRight returns the index after the rightmost equal element.
 *
 * @param key the key whose insertion point to search for
 * @param a the array in which to search
 * @param base the index of the first element in the range
 * @param len the length of the range; must be > 0
 * @param hint the index at which to begin the search, 0 <= hint < n.
 * The closer hint is to the result, the faster this method will run.
 * @param c the comparator used to order the range, and to search
 * @return the int k, 0 <= k <= n such that a[b + k - 1] <= key < a[b + k]
 */
private static <T> int gallopRight(T key, T[] a, int base, int len,
                                   int hint, Comparator<? super T> c) {
    assert len > 0 && hint >= 0 && hint < len;
    int ofs = 1;
    int lastOfs = 0;
    // Phase 1: exponential search outward from hint; mirror image of
    // gallopLeft (strict vs. non-strict comparisons are swapped so ties
    // resolve to the rightmost position).
    if (c.compare(key, a[base + hint]) < 0) {
        // Gallop left until a[b+hint - ofs] <= key < a[b+hint - lastOfs]
        int maxOfs = hint + 1;
        while (ofs < maxOfs && c.compare(key, a[base + hint - ofs]) < 0) {
            lastOfs = ofs;
            ofs = (ofs << 1) + 1;   // 1, 3, 7, 15, ... (2^k - 1)
            if (ofs <= 0) // int overflow
                ofs = maxOfs;
        }
        if (ofs > maxOfs)
            ofs = maxOfs;
        // Make offsets relative to b (mirrored around hint)
        int tmp = lastOfs;
        lastOfs = hint - ofs;
        ofs = hint - tmp;
    } else { // a[b + hint] <= key
        // Gallop right until a[b+hint + lastOfs] <= key < a[b+hint + ofs]
        int maxOfs = len - hint;
        while (ofs < maxOfs && c.compare(key, a[base + hint + ofs]) >= 0) {
            lastOfs = ofs;
            ofs = (ofs << 1) + 1;
            if (ofs <= 0) // int overflow
                ofs = maxOfs;
        }
        if (ofs > maxOfs)
            ofs = maxOfs;
        // Make offsets relative to b
        lastOfs += hint;
        ofs += hint;
    }
    assert -1 <= lastOfs && lastOfs < ofs && ofs <= len;
    /*
     * Phase 2: now a[b + lastOfs] <= key < a[b + ofs], so key belongs
     * somewhere to the right of lastOfs but no farther right than ofs.
     * Do a binary search, with invariant
     * a[b + lastOfs - 1] <= key < a[b + ofs].
     */
    lastOfs++;
    while (lastOfs < ofs) {
        int m = lastOfs + ((ofs - lastOfs) >>> 1);   // overflow-safe midpoint
        if (c.compare(key, a[base + m]) < 0)
            ofs = m; // key < a[b + m]
        else
            lastOfs = m + 1; // a[b + m] <= key
    }
    assert lastOfs == ofs; // so a[b + ofs - 1] <= key < a[b + ofs]
    return ofs;
}
/**
 * Merges two adjacent runs in place, in a stable fashion. The first
 * element of the first run must be greater than the first element of the
 * second run (a[base1] > a[base2]), and the last element of the first run
 * (a[base1 + len1-1]) must be greater than all elements of the second run.
 *
 * For performance, this method should be called only when len1 <= len2;
 * its twin, mergeHi should be called if len1 >= len2. (Either method
 * may be called if len1 == len2.)
 *
 * @param base1 index of first element in first run to be merged
 * @param len1 length of first run to be merged (must be > 0)
 * @param base2 index of first element in second run to be merged
 * (must be aBase + aLen)
 * @param len2 length of second run to be merged (must be > 0)
 */
private void mergeLo(int base1, int len1, int base2, int len2) {
    assert len1 > 0 && len2 > 0 && base1 + len1 == base2;
    // Copy first (shorter) run into temp array; merge left-to-right back
    // into the hole it leaves behind.
    T[] a = this.a; // For performance
    T[] tmp = ensureCapacity(len1);
    int cursor1 = tmpBase; // Indexes into tmp array
    int cursor2 = base2; // Indexes int a
    int dest = base1; // Indexes int a
    System.arraycopy(a, base1, tmp, cursor1, len1);
    // Move first element of second run and deal with degenerate cases
    // (the preconditions guarantee a[base2] < a[base1]).
    a[dest++] = a[cursor2++];
    if (--len2 == 0) {
        System.arraycopy(tmp, cursor1, a, dest, len1);
        return;
    }
    if (len1 == 1) {
        System.arraycopy(a, cursor2, a, dest, len2);
        a[dest + len2] = tmp[cursor1]; // Last elt of run 1 to end of merge
        return;
    }
    Comparator<? super T> c = this.c; // Use local variable for performance
    int minGallop = this.minGallop; // "    "    "    "    "
    outer:
    while (true) {
        int count1 = 0; // Number of times in a row that first run won
        int count2 = 0; // Number of times in a row that second run won
        /*
         * Do the straightforward thing until (if ever) one run starts
         * winning consistently.
         */
        do {
            assert len1 > 1 && len2 > 0;
            if (c.compare(a[cursor2], tmp[cursor1]) < 0) {
                a[dest++] = a[cursor2++];
                count2++;
                count1 = 0;
                if (--len2 == 0)
                    break outer;
            } else {
                a[dest++] = tmp[cursor1++];
                count1++;
                count2 = 0;
                if (--len1 == 1)
                    break outer;
            }
        } while ((count1 | count2) < minGallop);
        /*
         * One run is winning so consistently that galloping may be a
         * huge win. So try that, and continue galloping until (if ever)
         * neither run appears to be winning consistently anymore.
         */
        do {
            assert len1 > 1 && len2 > 0;
            count1 = gallopRight(a[cursor2], tmp, cursor1, len1, 0, c);
            if (count1 != 0) {
                System.arraycopy(tmp, cursor1, a, dest, count1);
                dest += count1;
                cursor1 += count1;
                len1 -= count1;
                if (len1 <= 1) // len1 == 1 || len1 == 0
                    break outer;
            }
            a[dest++] = a[cursor2++];
            if (--len2 == 0)
                break outer;
            count2 = gallopLeft(tmp[cursor1], a, cursor2, len2, 0, c);
            if (count2 != 0) {
                System.arraycopy(a, cursor2, a, dest, count2);
                dest += count2;
                cursor2 += count2;
                len2 -= count2;
                if (len2 == 0)
                    break outer;
            }
            a[dest++] = tmp[cursor1++];
            if (--len1 == 1)
                break outer;
            minGallop--;   // reward successful gallops: lower the threshold
        } while (count1 >= MIN_GALLOP | count2 >= MIN_GALLOP);
        if (minGallop < 0)
            minGallop = 0;
        minGallop += 2; // Penalize for leaving gallop mode
    } // End of "outer" loop
    this.minGallop = minGallop < 1 ? 1 : minGallop; // Write back to field
    if (len1 == 1) {
        assert len2 > 0;
        System.arraycopy(a, cursor2, a, dest, len2);
        a[dest + len2] = tmp[cursor1]; // Last elt of run 1 to end of merge
    } else if (len1 == 0) {
        // len1 can only hit 0 here if the comparator is inconsistent.
        throw new IllegalArgumentException(
            "Comparison method violates its general contract!");
    } else {
        assert len2 == 0;
        assert len1 > 1;
        System.arraycopy(tmp, cursor1, a, dest, len1);
    }
}
/**
 * Like mergeLo, except that this method should be called only if
 * len1 >= len2; mergeLo should be called if len1 <= len2. (Either method
 * may be called if len1 == len2.)
 *
 * @param base1 index of first element in first run to be merged
 * @param len1 length of first run to be merged (must be > 0)
 * @param base2 index of first element in second run to be merged
 * (must be aBase + aLen)
 * @param len2 length of second run to be merged (must be > 0)
 */
private void mergeHi(int base1, int len1, int base2, int len2) {
    assert len1 > 0 && len2 > 0 && base1 + len1 == base2;
    // Copy second (shorter) run into temp array; merge right-to-left back
    // into the hole it leaves behind — the mirror image of mergeLo.
    T[] a = this.a; // For performance
    T[] tmp = ensureCapacity(len2);
    int tmpBase = this.tmpBase;
    System.arraycopy(a, base2, tmp, tmpBase, len2);
    int cursor1 = base1 + len1 - 1; // Indexes into a
    int cursor2 = tmpBase + len2 - 1; // Indexes into tmp array
    int dest = base2 + len2 - 1; // Indexes into a
    // Move last element of first run and deal with degenerate cases
    // (the preconditions guarantee a[base1+len1-1] > all of run 2).
    a[dest--] = a[cursor1--];
    if (--len1 == 0) {
        System.arraycopy(tmp, tmpBase, a, dest - (len2 - 1), len2);
        return;
    }
    if (len2 == 1) {
        dest -= len1;
        cursor1 -= len1;
        System.arraycopy(a, cursor1 + 1, a, dest + 1, len1);
        a[dest] = tmp[cursor2];
        return;
    }
    Comparator<? super T> c = this.c; // Use local variable for performance
    int minGallop = this.minGallop; // "    "    "    "    "
    outer:
    while (true) {
        int count1 = 0; // Number of times in a row that first run won
        int count2 = 0; // Number of times in a row that second run won
        /*
         * Do the straightforward thing until (if ever) one run
         * appears to win consistently.
         */
        do {
            assert len1 > 0 && len2 > 1;
            if (c.compare(tmp[cursor2], a[cursor1]) < 0) {
                a[dest--] = a[cursor1--];
                count1++;
                count2 = 0;
                if (--len1 == 0)
                    break outer;
            } else {
                a[dest--] = tmp[cursor2--];
                count2++;
                count1 = 0;
                if (--len2 == 1)
                    break outer;
            }
        } while ((count1 | count2) < minGallop);
        /*
         * One run is winning so consistently that galloping may be a
         * huge win. So try that, and continue galloping until (if ever)
         * neither run appears to be winning consistently anymore.
         */
        do {
            assert len1 > 0 && len2 > 1;
            count1 = len1 - gallopRight(tmp[cursor2], a, base1, len1, len1 - 1, c);
            if (count1 != 0) {
                dest -= count1;
                cursor1 -= count1;
                len1 -= count1;
                System.arraycopy(a, cursor1 + 1, a, dest + 1, count1);
                if (len1 == 0)
                    break outer;
            }
            a[dest--] = tmp[cursor2--];
            if (--len2 == 1)
                break outer;
            count2 = len2 - gallopLeft(a[cursor1], tmp, tmpBase, len2, len2 - 1, c);
            if (count2 != 0) {
                dest -= count2;
                cursor2 -= count2;
                len2 -= count2;
                System.arraycopy(tmp, cursor2 + 1, a, dest + 1, count2);
                if (len2 <= 1) // len2 == 1 || len2 == 0
                    break outer;
            }
            a[dest--] = a[cursor1--];
            if (--len1 == 0)
                break outer;
            minGallop--;   // reward successful gallops: lower the threshold
        } while (count1 >= MIN_GALLOP | count2 >= MIN_GALLOP);
        if (minGallop < 0)
            minGallop = 0;
        minGallop += 2; // Penalize for leaving gallop mode
    } // End of "outer" loop
    this.minGallop = minGallop < 1 ? 1 : minGallop; // Write back to field
    if (len2 == 1) {
        assert len1 > 0;
        dest -= len1;
        cursor1 -= len1;
        System.arraycopy(a, cursor1 + 1, a, dest + 1, len1);
        a[dest] = tmp[cursor2]; // Move first elt of run2 to front of merge
    } else if (len2 == 0) {
        // len2 can only hit 0 here if the comparator is inconsistent.
        throw new IllegalArgumentException(
            "Comparison method violates its general contract!");
    } else {
        assert len1 == 0;
        assert len2 > 0;
        System.arraycopy(tmp, tmpBase, a, dest - (len2 - 1), len2);
    }
}
/**
 * Ensures that the external array tmp has at least the specified
 * number of elements, increasing its size if necessary. The size
 * increases exponentially to ensure amortized linear time complexity.
 *
 * @param minCapacity the minimum required capacity of the tmp array
 * @return tmp, whether or not it grew
 */
private T[] ensureCapacity(int minCapacity) {
    if (tmpLen < minCapacity) {
        // Compute smallest power of 2 > minCapacity:
        // fill all bits below the highest set bit, then add 1.
        int newSize = -1 >>> Integer.numberOfLeadingZeros(minCapacity);
        newSize++;
        if (newSize < 0) // Not bloody likely! (overflowed to negative)
            newSize = minCapacity;
        else
            // Never need more than half the array for a merge buffer.
            newSize = Math.min(newSize, a.length >>> 1);
        @SuppressWarnings({"unchecked", "UnnecessaryLocalVariable"})
        T[] newArray = (T[])java.lang.reflect.Array.newInstance
            (a.getClass().getComponentType(), newSize);
        tmp = newArray;
        tmpLen = newSize;
        tmpBase = 0;
    }
    return tmp;
}
}
|
googleapis/google-cloud-java | 36,935 | java-telcoautomation/proto-google-cloud-telcoautomation-v1/src/main/java/com/google/cloud/telcoautomation/v1/ListBlueprintsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/telcoautomation/v1/telcoautomation.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.telcoautomation.v1;
/**
*
*
* <pre>
* Response object for `ListBlueprints`.
* </pre>
*
* Protobuf type {@code google.cloud.telcoautomation.v1.ListBlueprintsResponse}
*/
public final class ListBlueprintsResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.telcoautomation.v1.ListBlueprintsResponse)
ListBlueprintsResponseOrBuilder {
private static final long serialVersionUID = 0L;
// NOTE: protoc-generated code — edits here are lost on regeneration.
// Use ListBlueprintsResponse.newBuilder() to construct.
private ListBlueprintsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
}

// No-arg ctor used for the default instance: empty repeated field, empty token.
private ListBlueprintsResponse() {
    blueprints_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
}

// Runtime hook used by the protobuf library to create fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListBlueprintsResponse();
}
// Descriptor for this message type, resolved from the generated file descriptor.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.telcoautomation.v1.TelcoautomationProto
        .internal_static_google_cloud_telcoautomation_v1_ListBlueprintsResponse_descriptor;
}

// Maps proto field numbers to the generated Java accessors via reflection.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
    return com.google.cloud.telcoautomation.v1.TelcoautomationProto
        .internal_static_google_cloud_telcoautomation_v1_ListBlueprintsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.telcoautomation.v1.ListBlueprintsResponse.class,
            com.google.cloud.telcoautomation.v1.ListBlueprintsResponse.Builder.class);
}
public static final int BLUEPRINTS_FIELD_NUMBER = 1;

// Backing storage for the repeated `blueprints` field (immutable once built).
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.telcoautomation.v1.Blueprint> blueprints_;

/**
 *
 *
 * <pre>
 * The list of requested blueprints.
 * </pre>
 *
 * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.cloud.telcoautomation.v1.Blueprint> getBlueprintsList() {
    return blueprints_;
}

/**
 *
 *
 * <pre>
 * The list of requested blueprints.
 * </pre>
 *
 * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.cloud.telcoautomation.v1.BlueprintOrBuilder>
    getBlueprintsOrBuilderList() {
    return blueprints_;
}

/**
 *
 *
 * <pre>
 * The list of requested blueprints.
 * </pre>
 *
 * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
 */
@java.lang.Override
public int getBlueprintsCount() {
    return blueprints_.size();
}

/**
 *
 *
 * <pre>
 * The list of requested blueprints.
 * </pre>
 *
 * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
 */
@java.lang.Override
public com.google.cloud.telcoautomation.v1.Blueprint getBlueprints(int index) {
    return blueprints_.get(index);
}

/**
 *
 *
 * <pre>
 * The list of requested blueprints.
 * </pre>
 *
 * <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
 */
@java.lang.Override
public com.google.cloud.telcoautomation.v1.BlueprintOrBuilder getBlueprintsOrBuilder(int index) {
    return blueprints_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

// Stored as Object: holds either a String or a ByteString; lazily converted
// and cached on first access (standard protobuf string-field pattern).
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";

/**
 *
 *
 * <pre>
 * A token that can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
        return (java.lang.String) ref;
    } else {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        // Cache the decoded String so subsequent calls skip the UTF-8 decode.
        nextPageToken_ = s;
        return s;
    }
}

/**
 *
 *
 * <pre>
 * A token that can be sent as `page_token` to retrieve the next page.
 * If this field is omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        // Cache the encoded ByteString for subsequent calls.
        nextPageToken_ = b;
        return b;
    } else {
        return (com.google.protobuf.ByteString) ref;
    }
}
// Memoized initialization check: -1 = unknown, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

// This message has no required fields, so it is always initialized.
@java.lang.Override
public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
}

// Serializes populated fields in field-number order (wire format).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < blueprints_.size(); i++) {
        output.writeMessage(1, blueprints_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
        com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
}

// Computes (and memoizes) the serialized byte size; must mirror writeTo.
@java.lang.Override
public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < blueprints_.size(); i++) {
        size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, blueprints_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
        size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
}
// Field-by-field value equality, including unknown fields.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
        return true;
    }
    if (!(obj instanceof com.google.cloud.telcoautomation.v1.ListBlueprintsResponse)) {
        return super.equals(obj);
    }
    com.google.cloud.telcoautomation.v1.ListBlueprintsResponse other =
        (com.google.cloud.telcoautomation.v1.ListBlueprintsResponse) obj;
    if (!getBlueprintsList().equals(other.getBlueprintsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
}

// Memoized hash consistent with equals (folds in descriptor and set fields).
@java.lang.Override
public int hashCode() {
    if (memoizedHashCode != 0) {
        return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getBlueprintsCount() > 0) {
        hash = (37 * hash) + BLUEPRINTS_FIELD_NUMBER;
        hash = (53 * hash) + getBlueprintsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
}
// Standard generated parse entry points: one overload per input type
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each
// with and without an ExtensionRegistry.
public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
}

public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse parseFrom(
    java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
}

// Delimited variants read a length-prefixed message from the stream.
public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
}

public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
}
// Builder factory methods (standard generated pattern).
@java.lang.Override
public Builder newBuilderForType() {
    return newBuilder();
}

public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
}

// Returns a builder pre-populated with the given message's fields.
public static Builder newBuilder(
    com.google.cloud.telcoautomation.v1.ListBlueprintsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
    // The default instance yields a fresh builder; otherwise seed from this.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
}
/**
*
*
* <pre>
* Response object for `ListBlueprints`.
* </pre>
*
* Protobuf type {@code google.cloud.telcoautomation.v1.ListBlueprintsResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.telcoautomation.v1.ListBlueprintsResponse)
com.google.cloud.telcoautomation.v1.ListBlueprintsResponseOrBuilder {
// Builder-side descriptor plumbing (mirrors the outer class).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.telcoautomation.v1.TelcoautomationProto
        .internal_static_google_cloud_telcoautomation_v1_ListBlueprintsResponse_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
    return com.google.cloud.telcoautomation.v1.TelcoautomationProto
        .internal_static_google_cloud_telcoautomation_v1_ListBlueprintsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.telcoautomation.v1.ListBlueprintsResponse.class,
            com.google.cloud.telcoautomation.v1.ListBlueprintsResponse.Builder.class);
}

// Construct using com.google.cloud.telcoautomation.v1.ListBlueprintsResponse.newBuilder()
private Builder() {}

private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
}

// Resets every field (and the has-bits) to its default state.
@java.lang.Override
public Builder clear() {
    super.clear();
    bitField0_ = 0;
    if (blueprintsBuilder_ == null) {
        blueprints_ = java.util.Collections.emptyList();
    } else {
        blueprints_ = null;
        blueprintsBuilder_.clear();
    }
    bitField0_ = (bitField0_ & ~0x00000001);
    nextPageToken_ = "";
    return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.telcoautomation.v1.TelcoautomationProto
        .internal_static_google_cloud_telcoautomation_v1_ListBlueprintsResponse_descriptor;
}

@java.lang.Override
public com.google.cloud.telcoautomation.v1.ListBlueprintsResponse getDefaultInstanceForType() {
    return com.google.cloud.telcoautomation.v1.ListBlueprintsResponse.getDefaultInstance();
}

// build() enforces initialization; buildPartial() does not.
@java.lang.Override
public com.google.cloud.telcoautomation.v1.ListBlueprintsResponse build() {
    com.google.cloud.telcoautomation.v1.ListBlueprintsResponse result = buildPartial();
    if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
    }
    return result;
}

@java.lang.Override
public com.google.cloud.telcoautomation.v1.ListBlueprintsResponse buildPartial() {
    com.google.cloud.telcoautomation.v1.ListBlueprintsResponse result =
        new com.google.cloud.telcoautomation.v1.ListBlueprintsResponse(this);
    buildPartialRepeatedFields(result);
    if (bitField0_ != 0) {
        buildPartial0(result);
    }
    onBuilt();
    return result;
}

// Transfers the repeated `blueprints` field, freezing the list if it was
// built inline rather than through a nested builder.
private void buildPartialRepeatedFields(
    com.google.cloud.telcoautomation.v1.ListBlueprintsResponse result) {
    if (blueprintsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
            blueprints_ = java.util.Collections.unmodifiableList(blueprints_);
            bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.blueprints_ = blueprints_;
    } else {
        result.blueprints_ = blueprintsBuilder_.build();
    }
}

// Transfers scalar fields guarded by their has-bits.
private void buildPartial0(com.google.cloud.telcoautomation.v1.ListBlueprintsResponse result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
    }
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.telcoautomation.v1.ListBlueprintsResponse) {
return mergeFrom((com.google.cloud.telcoautomation.v1.ListBlueprintsResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.telcoautomation.v1.ListBlueprintsResponse other) {
if (other == com.google.cloud.telcoautomation.v1.ListBlueprintsResponse.getDefaultInstance())
return this;
if (blueprintsBuilder_ == null) {
if (!other.blueprints_.isEmpty()) {
if (blueprints_.isEmpty()) {
blueprints_ = other.blueprints_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureBlueprintsIsMutable();
blueprints_.addAll(other.blueprints_);
}
onChanged();
}
} else {
if (!other.blueprints_.isEmpty()) {
if (blueprintsBuilder_.isEmpty()) {
blueprintsBuilder_.dispose();
blueprintsBuilder_ = null;
blueprints_ = other.blueprints_;
bitField0_ = (bitField0_ & ~0x00000001);
blueprintsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getBlueprintsFieldBuilder()
: null;
} else {
blueprintsBuilder_.addAllMessages(other.blueprints_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.telcoautomation.v1.Blueprint m =
input.readMessage(
com.google.cloud.telcoautomation.v1.Blueprint.parser(), extensionRegistry);
if (blueprintsBuilder_ == null) {
ensureBlueprintsIsMutable();
blueprints_.add(m);
} else {
blueprintsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.telcoautomation.v1.Blueprint> blueprints_ =
java.util.Collections.emptyList();
private void ensureBlueprintsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
blueprints_ =
new java.util.ArrayList<com.google.cloud.telcoautomation.v1.Blueprint>(blueprints_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.telcoautomation.v1.Blueprint,
com.google.cloud.telcoautomation.v1.Blueprint.Builder,
com.google.cloud.telcoautomation.v1.BlueprintOrBuilder>
blueprintsBuilder_;
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public java.util.List<com.google.cloud.telcoautomation.v1.Blueprint> getBlueprintsList() {
if (blueprintsBuilder_ == null) {
return java.util.Collections.unmodifiableList(blueprints_);
} else {
return blueprintsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public int getBlueprintsCount() {
if (blueprintsBuilder_ == null) {
return blueprints_.size();
} else {
return blueprintsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public com.google.cloud.telcoautomation.v1.Blueprint getBlueprints(int index) {
if (blueprintsBuilder_ == null) {
return blueprints_.get(index);
} else {
return blueprintsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public Builder setBlueprints(int index, com.google.cloud.telcoautomation.v1.Blueprint value) {
if (blueprintsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureBlueprintsIsMutable();
blueprints_.set(index, value);
onChanged();
} else {
blueprintsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public Builder setBlueprints(
int index, com.google.cloud.telcoautomation.v1.Blueprint.Builder builderForValue) {
if (blueprintsBuilder_ == null) {
ensureBlueprintsIsMutable();
blueprints_.set(index, builderForValue.build());
onChanged();
} else {
blueprintsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public Builder addBlueprints(com.google.cloud.telcoautomation.v1.Blueprint value) {
if (blueprintsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureBlueprintsIsMutable();
blueprints_.add(value);
onChanged();
} else {
blueprintsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public Builder addBlueprints(int index, com.google.cloud.telcoautomation.v1.Blueprint value) {
if (blueprintsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureBlueprintsIsMutable();
blueprints_.add(index, value);
onChanged();
} else {
blueprintsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public Builder addBlueprints(
com.google.cloud.telcoautomation.v1.Blueprint.Builder builderForValue) {
if (blueprintsBuilder_ == null) {
ensureBlueprintsIsMutable();
blueprints_.add(builderForValue.build());
onChanged();
} else {
blueprintsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public Builder addBlueprints(
int index, com.google.cloud.telcoautomation.v1.Blueprint.Builder builderForValue) {
if (blueprintsBuilder_ == null) {
ensureBlueprintsIsMutable();
blueprints_.add(index, builderForValue.build());
onChanged();
} else {
blueprintsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public Builder addAllBlueprints(
java.lang.Iterable<? extends com.google.cloud.telcoautomation.v1.Blueprint> values) {
if (blueprintsBuilder_ == null) {
ensureBlueprintsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, blueprints_);
onChanged();
} else {
blueprintsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public Builder clearBlueprints() {
if (blueprintsBuilder_ == null) {
blueprints_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
blueprintsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public Builder removeBlueprints(int index) {
if (blueprintsBuilder_ == null) {
ensureBlueprintsIsMutable();
blueprints_.remove(index);
onChanged();
} else {
blueprintsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public com.google.cloud.telcoautomation.v1.Blueprint.Builder getBlueprintsBuilder(int index) {
return getBlueprintsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public com.google.cloud.telcoautomation.v1.BlueprintOrBuilder getBlueprintsOrBuilder(
int index) {
if (blueprintsBuilder_ == null) {
return blueprints_.get(index);
} else {
return blueprintsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public java.util.List<? extends com.google.cloud.telcoautomation.v1.BlueprintOrBuilder>
getBlueprintsOrBuilderList() {
if (blueprintsBuilder_ != null) {
return blueprintsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(blueprints_);
}
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public com.google.cloud.telcoautomation.v1.Blueprint.Builder addBlueprintsBuilder() {
return getBlueprintsFieldBuilder()
.addBuilder(com.google.cloud.telcoautomation.v1.Blueprint.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public com.google.cloud.telcoautomation.v1.Blueprint.Builder addBlueprintsBuilder(int index) {
return getBlueprintsFieldBuilder()
.addBuilder(index, com.google.cloud.telcoautomation.v1.Blueprint.getDefaultInstance());
}
/**
*
*
* <pre>
* The list of requested blueprints.
* </pre>
*
* <code>repeated .google.cloud.telcoautomation.v1.Blueprint blueprints = 1;</code>
*/
public java.util.List<com.google.cloud.telcoautomation.v1.Blueprint.Builder>
getBlueprintsBuilderList() {
return getBlueprintsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.telcoautomation.v1.Blueprint,
com.google.cloud.telcoautomation.v1.Blueprint.Builder,
com.google.cloud.telcoautomation.v1.BlueprintOrBuilder>
getBlueprintsFieldBuilder() {
if (blueprintsBuilder_ == null) {
blueprintsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.telcoautomation.v1.Blueprint,
com.google.cloud.telcoautomation.v1.Blueprint.Builder,
com.google.cloud.telcoautomation.v1.BlueprintOrBuilder>(
blueprints_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
blueprints_ = null;
}
return blueprintsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.telcoautomation.v1.ListBlueprintsResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.telcoautomation.v1.ListBlueprintsResponse)
private static final com.google.cloud.telcoautomation.v1.ListBlueprintsResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.telcoautomation.v1.ListBlueprintsResponse();
}
public static com.google.cloud.telcoautomation.v1.ListBlueprintsResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListBlueprintsResponse> PARSER =
new com.google.protobuf.AbstractParser<ListBlueprintsResponse>() {
@java.lang.Override
public ListBlueprintsResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListBlueprintsResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListBlueprintsResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.telcoautomation.v1.ListBlueprintsResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
hibernate/hibernate-orm | 34,323 | hibernate-core/src/test/java/org/hibernate/orm/test/cfg/persister/GoofyPersisterClassProvider.java | /*
* SPDX-License-Identifier: Apache-2.0
* Copyright Red Hat Inc. and Hibernate Authors
*/
package org.hibernate.orm.test.cfg.persister;
import java.io.Serializable;
import java.util.Collections;
import java.util.Comparator;
import java.util.List;
import java.util.Map;
import java.util.Set;
import java.util.function.BiConsumer;
import java.util.function.Consumer;
import org.checkerframework.checker.nullness.qual.Nullable;
import org.hibernate.Filter;
import org.hibernate.HibernateException;
import org.hibernate.LockMode;
import org.hibernate.LockOptions;
import org.hibernate.MappingException;
import org.hibernate.annotations.OnDeleteAction;
import org.hibernate.bytecode.internal.BytecodeEnhancementMetadataNonPojoImpl;
import org.hibernate.bytecode.spi.BytecodeEnhancementMetadata;
import org.hibernate.cache.spi.access.CollectionDataAccess;
import org.hibernate.cache.spi.access.EntityDataAccess;
import org.hibernate.cache.spi.access.NaturalIdDataAccess;
import org.hibernate.cache.spi.entry.CacheEntry;
import org.hibernate.cache.spi.entry.CacheEntryStructure;
import org.hibernate.collection.spi.CollectionSemantics;
import org.hibernate.collection.spi.PersistentCollection;
import org.hibernate.engine.internal.MutableEntityEntryFactory;
import org.hibernate.engine.spi.CascadeStyle;
import org.hibernate.engine.spi.EntityEntryFactory;
import org.hibernate.engine.spi.LoadQueryInfluencers;
import org.hibernate.engine.spi.SessionFactoryImplementor;
import org.hibernate.engine.spi.SharedSessionContractImplementor;
import org.hibernate.generator.Generator;
import org.hibernate.generator.values.GeneratedValues;
import org.hibernate.generator.values.GeneratedValuesMutationDelegate;
import org.hibernate.id.IdentifierGenerator;
import org.hibernate.internal.FilterAliasGenerator;
import org.hibernate.internal.util.IndexedConsumer;
import org.hibernate.loader.ast.spi.MultiIdLoadOptions;
import org.hibernate.mapping.Collection;
import org.hibernate.mapping.PersistentClass;
import org.hibernate.metamodel.mapping.AttributeMapping;
import org.hibernate.metamodel.mapping.AttributeMappingsList;
import org.hibernate.metamodel.mapping.AttributeMappingsMap;
import org.hibernate.metamodel.mapping.DiscriminatorType;
import org.hibernate.metamodel.mapping.EntityDiscriminatorMapping;
import org.hibernate.metamodel.mapping.EntityIdentifierMapping;
import org.hibernate.metamodel.mapping.EntityMappingType;
import org.hibernate.metamodel.mapping.EntityRowIdMapping;
import org.hibernate.metamodel.mapping.EntityVersionMapping;
import org.hibernate.metamodel.mapping.JdbcMapping;
import org.hibernate.metamodel.mapping.ModelPart;
import org.hibernate.metamodel.mapping.NaturalIdMapping;
import org.hibernate.metamodel.mapping.SelectableMapping;
import org.hibernate.metamodel.mapping.TableDetails;
import org.hibernate.metamodel.model.domain.NavigableRole;
import org.hibernate.metamodel.spi.EntityRepresentationStrategy;
import org.hibernate.metamodel.spi.RuntimeModelCreationContext;
import org.hibernate.persister.collection.CollectionPersister;
import org.hibernate.persister.entity.EntityPersister;
import org.hibernate.persister.entity.UniqueKeyEntry;
import org.hibernate.persister.entity.mutation.DeleteCoordinator;
import org.hibernate.persister.entity.mutation.EntityTableMapping;
import org.hibernate.persister.entity.mutation.InsertCoordinator;
import org.hibernate.persister.entity.mutation.UpdateCoordinator;
import org.hibernate.persister.spi.PersisterClassResolver;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableInsertStrategy;
import org.hibernate.query.sqm.mutation.spi.SqmMultiTableMutationStrategy;
import org.hibernate.spi.NavigablePath;
import org.hibernate.sql.ast.spi.SqlAstCreationState;
import org.hibernate.sql.ast.spi.SqlSelection;
import org.hibernate.sql.ast.tree.from.TableGroup;
import org.hibernate.sql.ast.tree.predicate.Predicate;
import org.hibernate.sql.model.ast.builder.MutationGroupBuilder;
import org.hibernate.sql.results.graph.DomainResult;
import org.hibernate.sql.results.graph.DomainResultCreationState;
import org.hibernate.tuple.entity.EntityMetamodel;
import org.hibernate.type.BasicType;
import org.hibernate.type.CollectionType;
import org.hibernate.type.Type;
import org.hibernate.type.descriptor.java.JavaType;
/**
* @author Emmanuel Bernard
*/
public class GoofyPersisterClassProvider implements PersisterClassResolver {
@Override
public Class<? extends EntityPersister> getEntityPersisterClass(PersistentClass metadata) {
return NoopEntityPersister.class;
}
@Override
public Class<? extends CollectionPersister> getCollectionPersisterClass(Collection metadata) {
return NoopCollectionPersister.class;
}
public static class NoopEntityPersister implements EntityPersister {
public NoopEntityPersister(
final PersistentClass persistentClass,
final EntityDataAccess cacheAccessStrategy,
final NaturalIdDataAccess naturalIdRegionAccessStrategy,
final RuntimeModelCreationContext creationContext) {
throw new GoofyException(NoopEntityPersister.class);
}
@Override
public boolean managesColumns(String[] columnNames) {
return false;
}
@Override
public NavigableRole getNavigableRole() {
return null;
}
@Override
public BytecodeEnhancementMetadata getInstrumentationMetadata() {
return new BytecodeEnhancementMetadataNonPojoImpl( null );
}
@Override
public void postInstantiate() throws MappingException {
}
@Override
public SessionFactoryImplementor getFactory() {
return null;
}
@Override
public EntityEntryFactory getEntityEntryFactory() {
return MutableEntityEntryFactory.INSTANCE;
}
@Override
public String getRootEntityName() {
return null;
}
@Override
public String getEntityName() {
return null;
}
@Override
public @Nullable String getJpaEntityName() {
return null;
}
@Override
public TableDetails getMappedTableDetails() {
throw new UnsupportedOperationException();
}
@Override
public TableDetails getIdentifierTableDetails() {
throw new UnsupportedOperationException();
}
@Override
public void forEachTableDetails(Consumer<TableDetails> consumer) {
throw new UnsupportedOperationException();
}
@Override
public ModelPart findSubPart(
String name, EntityMappingType targetType) {
return null;
}
@Override
public void visitSubParts(Consumer<ModelPart> consumer, EntityMappingType targetType) {
}
@Override
public <T> DomainResult<T> createDomainResult(
NavigablePath navigablePath,
TableGroup tableGroup,
String resultVariable,
DomainResultCreationState creationState) {
return null;
}
@Override
public void applySqlSelections(
NavigablePath navigablePath, TableGroup tableGroup, DomainResultCreationState creationState) {
}
@Override
public void applySqlSelections(
NavigablePath navigablePath,
TableGroup tableGroup,
DomainResultCreationState creationState,
BiConsumer<SqlSelection, JdbcMapping> selectionConsumer) {
}
@Override
public JdbcMapping getJdbcMapping(int index) {
throw new IndexOutOfBoundsException( index );
}
@Override
public int forEachJdbcType(
int offset, IndexedConsumer<JdbcMapping> action) {
return 0;
}
@Override
public Object disassemble(Object value, SharedSessionContractImplementor session) {
return null;
}
@Override
public <X, Y> int forEachDisassembledJdbcValue(
Object value,
int offset,
X x,
Y y,
JdbcValuesBiConsumer<X, Y> valuesConsumer,
SharedSessionContractImplementor session) {
return 0;
}
@Override
public boolean isExplicitPolymorphism() {
return false;
}
@Override
public SqmMultiTableMutationStrategy getSqmMultiTableMutationStrategy() {
return null;
}
@Override
public SqmMultiTableInsertStrategy getSqmMultiTableInsertStrategy() {
return null;
}
@Override
public AttributeMapping findDeclaredAttributeMapping(String name) {
return null;
}
@Override
public AttributeMappingsMap getDeclaredAttributeMappings() {
return null;
}
@Override
public void visitDeclaredAttributeMappings(Consumer<? super AttributeMapping> action) {
}
@Override
public EntityMetamodel getEntityMetamodel() {
return null;
}
@Override
public boolean isSubclassEntityName(String entityName) {
return false;
}
@Override
public String[] getPropertySpaces() {
return new String[0];
}
@Override
public Serializable[] getQuerySpaces() {
return new Serializable[0];
}
@Override
public boolean hasProxy() {
return false;
}
@Override
public boolean hasCollections() {
return false;
}
@Override
public boolean hasMutableProperties() {
return false;
}
@Override
public boolean hasSubselectLoadableCollections() {
return false;
}
@Override
public boolean hasCollectionNotReferencingPK() {
return false;
}
@Override
public boolean hasCascades() {
return false;
}
@Override
public boolean hasCascadeDelete() {
return false;
}
@Override
public boolean hasToOnes() {
return false;
}
@Override
public boolean hasCascadePersist() {
return false;
}
@Override
public boolean hasOwnedCollections() {
return false;
}
@Override
public boolean isMutable() {
return false;
}
@Override
public boolean isInherited() {
return false;
}
@Override
public boolean isIdentifierAssignedByInsert() {
return false;
}
@Override
public Type getPropertyType(String propertyName) throws MappingException {
return null;
}
@Override
public int[] findDirty(Object[] currentState, Object[] previousState, Object owner, SharedSessionContractImplementor session) {
return new int[0];
}
@Override
public int[] findModified(Object[] old, Object[] current, Object object, SharedSessionContractImplementor session) {
return new int[0];
}
@Override
public boolean hasIdentifierProperty() {
return false;
}
@Override
public boolean canExtractIdOutOfEntity() {
return false;
}
@Override
public boolean isVersioned() {
return false;
}
@Override
public BasicType<?> getVersionType() {
return null;
}
@Override
public int getVersionPropertyIndex() {
return 0;
}
@Override
public boolean hasNaturalIdentifier() {
return false;
}
@Override
public int[] getNaturalIdentifierProperties() {
return new int[0];
}
@Override
public Object[] getNaturalIdentifierSnapshot(Object id, SharedSessionContractImplementor session) {
return new Object[0];
}
@Override
public Serializable loadEntityIdByNaturalId(
Object[] naturalIdValues, LockOptions lockOptions, SharedSessionContractImplementor session) {
return null;
}
@Override
public IdentifierGenerator getIdentifierGenerator() {
return null;
}
@Override
public boolean hasLazyProperties() {
return false;
}
@Override
public Object load(Object id, Object optionalObject, LockMode lockMode, SharedSessionContractImplementor session) {
return null;
}
@Override
public Object load(Object id, Object optionalObject, LockOptions lockOptions, SharedSessionContractImplementor session) {
return null;
}
@Override
public List<?> multiLoad(Object[] ids, SharedSessionContractImplementor session, MultiIdLoadOptions loadOptions) {
return Collections.emptyList();
}
@Override
public void lock(Object id, Object version, Object object, LockMode lockMode, SharedSessionContractImplementor session) {
}
@Override
public void lock(Object id, Object version, Object object, LockOptions lockOptions, SharedSessionContractImplementor session) {
}
@Override
public InsertCoordinator getInsertCoordinator() {
return null;
}
@Override
public UpdateCoordinator getUpdateCoordinator() {
return null;
}
@Override
public DeleteCoordinator getDeleteCoordinator() {
return null;
}
@Override
public Type[] getPropertyTypes() {
return new Type[0];
}
@Override
public String[] getPropertyNames() {
return new String[0];
}
@Override
public boolean[] getPropertyInsertability() {
return new boolean[0];
}
@Override
public boolean[] getPropertyUpdateability() {
return new boolean[0];
}
@Override
public boolean[] getPropertyCheckability() {
return new boolean[0];
}
@Override
public boolean[] getPropertyNullability() {
return new boolean[0];
}
@Override
public boolean[] getPropertyVersionability() {
return new boolean[0];
}
@Override
public boolean[] getPropertyLaziness() {
return new boolean[0];
}
@Override
public CascadeStyle[] getPropertyCascadeStyles() {
return new CascadeStyle[0];
}
@Override
public Type getIdentifierType() {
return null;
}
@Override
public String getIdentifierPropertyName() {
return null;
}
@Override
public boolean isCacheInvalidationRequired() {
return false;
}
@Override
public boolean isLazyPropertiesCacheable() {
return false;
}
@Override
public boolean canReadFromCache() {
return false;
}
@Override
public boolean canWriteToCache() {
return false;
}
@Override
public boolean hasCache() {
return false;
}
@Override
public EntityDataAccess getCacheAccessStrategy() {
return null;
}
@Override
public boolean hasNaturalIdCache() {
return false;
}
@Override
public NaturalIdDataAccess getNaturalIdCacheAccessStrategy() {
return null;
}
@Override
public CacheEntryStructure getCacheEntryStructure() {
return null;
}
@Override
public CacheEntry buildCacheEntry(
Object entity, Object[] state, Object version, SharedSessionContractImplementor session) {
return null;
}
@Override
public boolean isSelectBeforeUpdateRequired() {
return false;
}
@Override
public Object[] getDatabaseSnapshot(Object id, SharedSessionContractImplementor session) throws HibernateException {
return new Object[0];
}
@Override
public Object getIdByUniqueKey(Object key, String uniquePropertyName, SharedSessionContractImplementor session) {
throw new UnsupportedOperationException( "not supported" );
}
@Override
public Object getCurrentVersion(Object id, SharedSessionContractImplementor session) throws HibernateException {
return null;
}
@Override
public Object forceVersionIncrement(Object id, Object currentVersion, SharedSessionContractImplementor session) {
return null;
}
@Override
public boolean isInstrumented() {
return false;
}
@Override
public boolean hasInsertGeneratedProperties() {
return false;
}
@Override
public boolean hasUpdateGeneratedProperties() {
return false;
}
@Override
public boolean isVersionPropertyGenerated() {
return false;
}
@Override
public void afterInitialize(Object entity, SharedSessionContractImplementor session) {
}
@Override
public void afterReassociate(Object entity, SharedSessionContractImplementor session) {
}
@Override
public Object createProxy(Object id, SharedSessionContractImplementor session) throws HibernateException {
return null;
}
@Override
public Boolean isTransient(Object object, SharedSessionContractImplementor session) throws HibernateException {
return null;
}
@Override
public Object[] getPropertyValuesToInsert(Object object, Map mergeMap, SharedSessionContractImplementor session) {
return new Object[0];
}
@Override
public void processInsertGeneratedProperties(Object id, Object entity, Object[] state, GeneratedValues generatedValues, SharedSessionContractImplementor session) {
}
@Override
public void processUpdateGeneratedProperties(Object id, Object entity, Object[] state, GeneratedValues generatedValues, SharedSessionContractImplementor session) {
}
@Override
public Class<?> getMappedClass() {
return null;
}
@Override
public Class<?> getConcreteProxyClass() {
return null;
}
@Override
public void setPropertyValues(Object object, Object[] values) {
}
@Override
public void setPropertyValue(Object object, int i, Object value) {
}
@Override
public Object[] getPropertyValues(Object object) {
return new Object[0];
}
@Override
public Object getPropertyValue(Object object, int i) {
return null;
}
@Override
public Object getPropertyValue(Object object, String propertyName) {
return null;
}
@Override
public Serializable getIdentifier(Object entity, SharedSessionContractImplementor session) {
return null;
}
@Override
public void setIdentifier(Object entity, Object id, SharedSessionContractImplementor session) {
}
@Override
public Object getVersion(Object object) {
return null;
}
@Override
public Object instantiate(Object id, SharedSessionContractImplementor session) {
return null;
}
@Override
public boolean isInstance(Object object) {
return false;
}
@Override
public boolean hasUninitializedLazyProperties(Object object) {
return false;
}
@Override
public void resetIdentifier(Object entity, Object currentId, Object currentVersion, SharedSessionContractImplementor session) {
}
@Override
public EntityPersister getSubclassEntityPersister(Object instance, SessionFactoryImplementor factory) {
return null;
}
@Override
public FilterAliasGenerator getFilterAliasGenerator(String rootAlias) {
// TODO Auto-generated method stub
return null;
}
@Override
public EntityPersister getEntityPersister() {
return this;
}
@Override
public EntityIdentifierMapping getIdentifierMapping() {
return null;
}
@Override
public EntityVersionMapping getVersionMapping() {
return null;
}
@Override
public EntityRowIdMapping getRowIdMapping() {
return null;
}
@Override
public void visitConstraintOrderedTables(ConstraintOrderedTableConsumer consumer) {
}
@Override
public EntityDiscriminatorMapping getDiscriminatorMapping() {
return null;
}
@Override
public Object getDiscriminatorValue() {
return null;
}
@Override
public NaturalIdMapping getNaturalIdMapping() {
return null;
}
@Override
public boolean isTypeOrSuperType(EntityMappingType targetType) {
return targetType == this;
}
@Override
public EntityRepresentationStrategy getRepresentationStrategy() {
return null;
}
@Override
public int[] resolveAttributeIndexes(String[] attributeNames) {
return null;
}
@Override
public boolean canUseReferenceCacheEntries() {
return false;
}
@Override
public boolean useShallowQueryCacheLayout() {
return false;
}
@Override
public boolean storeDiscriminatorInShallowQueryCacheLayout() {
return false;
}
@Override
public boolean hasFilterForLoadByKey() {
return false;
}
@Override
public Iterable<UniqueKeyEntry> uniqueKeyEntries() {
return Collections.emptyList();
}
@Override
public String getSelectByUniqueKeyString(String propertyName) {
return null;
}
@Override
public String getSelectByUniqueKeyString(String[] propertyNames, String[] columnNames) {
return null;
}
@Override
public String[] getRootTableKeyColumnNames() {
return new String[0];
}
@Override
public String getIdentitySelectString() {
return null;
}
@Override
public String[] getIdentifierColumnNames() {
return new String[0];
}
@Override
public boolean isAffectedByEnabledFilters(LoadQueryInfluencers influencers, boolean onlyApplyForLoadByKeyFilters) {
return false;
}
@Override
public boolean isAffectedByEntityGraph(LoadQueryInfluencers influencers) {
return false;
}
@Override
public boolean isAffectedByEnabledFetchProfiles(LoadQueryInfluencers influencers) {
return false;
}
@Override
public AttributeMappingsList getAttributeMappings() {
return null;
}
@Override
public void forEachAttributeMapping(Consumer<? super AttributeMapping> action) {
}
@Override
public JavaType getMappedJavaType() {
return null;
}
@Override
public EntityMappingType getTargetPart() {
return null;
}
@Override
public void forEachMutableTable(Consumer<EntityTableMapping> consumer) {
}
@Override
public void forEachMutableTableReverse(Consumer<EntityTableMapping> consumer) {
}
@Override
public String getIdentifierTableName() {
return null;
}
@Override
public EntityTableMapping getIdentifierTableMapping() {
return null;
}
@Override
public ModelPart getIdentifierDescriptor() {
return null;
}
@Override
public GeneratedValuesMutationDelegate getInsertDelegate() {
return null;
}
@Override
public GeneratedValuesMutationDelegate getUpdateDelegate() {
return null;
}
@Override
public String getTableName() {
return "";
}
@Override
public String[] getIdentifierAliases(String suffix) {
return new String[0];
}
@Override
public String getRootTableName() {
return "";
}
@Override
public String[] getRootTableIdentifierColumnNames() {
return new String[0];
}
@Override
public String getVersionColumnName() {
return "";
}
@Override
public String[] getPropertyAliases(String suffix, int i) {
return new String[0];
}
@Override
public String getDiscriminatorAlias(String suffix) {
return "";
}
@Override
public String getDiscriminatorColumnName() {
return "";
}
@Override
public Type getDiscriminatorType() {
return null;
}
@Override
public boolean hasRowId() {
return false;
}
@Override
public String[] getSubclassPropertyColumnAliases(String propertyName, String suffix) {
return new String[0];
}
@Override
public String[] getPropertyColumnNames(String propertyPath) {
return new String[0];
}
@Override
public String selectFragment(String alias, String suffix) {
return "";
}
@Override
public DiscriminatorType<?> getDiscriminatorDomainType() {
return null;
}
@Override
public String[] toColumns(String propertyName) {
return new String[0];
}
@Override
public boolean[] getNonLazyPropertyUpdateability() {
return new boolean[0];
}
@Override
public boolean hasMultipleTables() {
return false;
}
@Override
public String[] getTableNames() {
return new String[0];
}
@Override
public String getTableName(int j) {
return "";
}
@Override
public String[] getKeyColumns(int j) {
return new String[0];
}
@Override
public int getTableSpan() {
return 0;
}
@Override
public boolean isInverseTable(int j) {
return false;
}
@Override
public boolean isNullableTable(int j) {
return false;
}
@Override
public boolean hasDuplicateTables() {
return false;
}
@Override
public int getSubclassTableSpan() {
return 0;
}
@Override
public String getSubclassTableName(int j) {
return "";
}
@Override
public String getTableNameForColumn(String columnName) {
return "";
}
@Override
public String[] getSubclassPropertyColumnNames(int i) {
return new String[0];
}
@Override
public int countSubclassProperties() {
return 0;
}
@Override
public boolean isSharedColumn(String columnExpression) {
return false;
}
@Override
public String[][] getConstraintOrderedTableKeyColumnClosure() {
return new String[0][];
}
@Override
public EntityTableMapping[] getTableMappings() {
return new EntityTableMapping[0];
}
@Override
public String physicalTableNameForMutation(SelectableMapping selectableMapping) {
return "";
}
@Override
public void addDiscriminatorToInsertGroup(MutationGroupBuilder insertGroupBuilder) {
}
@Override
public void addSoftDeleteToInsertGroup(MutationGroupBuilder insertGroupBuilder) {
}
@Override
public String getAttributeMutationTableName(int i) {
return "";
}
@Override
public boolean isPolymorphic() {
return false;
}
@Override
public boolean isDynamicUpdate() {
return false;
}
@Override
public boolean isDynamicInsert() {
return false;
}
@Override
public OnDeleteAction[] getPropertyOnDeleteActions() {
return new OnDeleteAction[0];
}
@Override
public Generator[] getGenerators() {
return new Generator[0];
}
@Override
public boolean hasImmutableNaturalId() {
return false;
}
@Override
public boolean isNaturalIdentifierInsertGenerated() {
return false;
}
@Override
public boolean isLazy() {
return false;
}
@Override
public int getPropertySpan() {
return 0;
}
@Override
public boolean hasPreInsertGeneratedProperties() {
return false;
}
@Override
public boolean hasPreUpdateGeneratedProperties() {
return false;
}
}
public static class NoopCollectionPersister implements CollectionPersister {
public NoopCollectionPersister(
Collection collectionBinding,
CollectionDataAccess cacheAccessStrategy,
RuntimeModelCreationContext creationContext) {
throw new GoofyException(NoopCollectionPersister.class);
}
@Override
public NavigableRole getNavigableRole() {
return null;
}
public void initialize(Object key, SharedSessionContractImplementor session) throws HibernateException {
//To change body of implemented methods use File | Settings | File Templates.
}
public boolean hasCache() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public CollectionDataAccess getCacheAccessStrategy() {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public CacheEntryStructure getCacheEntryStructure() {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public boolean useShallowQueryCacheLayout() {
return false;
}
public CollectionType getCollectionType() {
throw new UnsupportedOperationException();
}
public Type getKeyType() {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Type getIndexType() {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Type getElementType() {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Class<?> getElementClass() {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isPrimitiveArray() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isArray() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isOneToMany() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isManyToMany() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean hasIndex() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isLazy() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isInverse() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public void remove(Object id, SharedSessionContractImplementor session) throws HibernateException {
//To change body of implemented methods use File | Settings | File Templates.
}
public void recreate(PersistentCollection<?> collection, Object key, SharedSessionContractImplementor session)
throws HibernateException {
//To change body of implemented methods use File | Settings | File Templates.
}
public void deleteRows(PersistentCollection<?> collection, Object key, SharedSessionContractImplementor session)
throws HibernateException {
//To change body of implemented methods use File | Settings | File Templates.
}
public void updateRows(PersistentCollection<?> collection, Object key, SharedSessionContractImplementor session)
throws HibernateException {
//To change body of implemented methods use File | Settings | File Templates.
}
public void insertRows(PersistentCollection<?> collection, Object key, SharedSessionContractImplementor session)
throws HibernateException {
//To change body of implemented methods use File | Settings | File Templates.
}
public String getRole() {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public EntityPersister getOwnerEntityPersister() {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public IdentifierGenerator getIdentifierGenerator() {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public Type getIdentifierType() {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean hasOrphanDelete() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean hasOrdering() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean hasManyToManyOrdering() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public String[] getCollectionSpaces() {
return new String[0]; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isCascadeDeleteEnabled() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isVersioned() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isMutable() {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public void postInstantiate() throws MappingException {
//To change body of implemented methods use File | Settings | File Templates.
}
public SessionFactoryImplementor getFactory() {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean isAffectedByEnabledFilters(SharedSessionContractImplementor session) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public String[] getKeyColumnAliases(String suffix) {
return new String[0]; //To change body of implemented methods use File | Settings | File Templates.
}
public String[] getIndexColumnAliases(String suffix) {
return new String[0]; //To change body of implemented methods use File | Settings | File Templates.
}
public String[] getElementColumnAliases(String suffix) {
return new String[0]; //To change body of implemented methods use File | Settings | File Templates.
}
public String getIdentifierColumnAlias(String suffix) {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
public int getSize(Object key, SharedSessionContractImplementor session) {
return 0; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean indexExists(Object key, Object index, SharedSessionContractImplementor session) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public boolean elementExists(Object key, Object element, SharedSessionContractImplementor session) {
return false; //To change body of implemented methods use File | Settings | File Templates.
}
public Object getElementByIndex(Object key, Object index, SharedSessionContractImplementor session, Object owner) {
return null; //To change body of implemented methods use File | Settings | File Templates.
}
@Override
public String getMappedByProperty() {
return null;
}
@Override
public Comparator<?> getSortingComparator() {
return null;
}
@Override
public CollectionSemantics<?,?> getCollectionSemantics() {
return null;
}
@Override
public void applyBaseManyToManyRestrictions(Consumer<Predicate> predicateConsumer, TableGroup tableGroup, boolean useQualifier, Map<String, Filter> enabledFilters, Set<String> treatAsDeclarations, SqlAstCreationState creationState) {
}
@Override
public void processQueuedOps(PersistentCollection<?> collection, Object key, SharedSessionContractImplementor session)
throws HibernateException {
}
@Override
public void applyFilterRestrictions(
Consumer<Predicate> predicateConsumer,
TableGroup tableGroup,
boolean useQualifier,
Map<String, Filter> enabledFilters,
boolean onlyApplyLoadByKeyFilters,
SqlAstCreationState creationState) {
}
@Override
public void applyBaseRestrictions(Consumer<Predicate> predicateConsumer, TableGroup tableGroup, boolean useQualifier, Map<String, Filter> enabledFilters, Set<String> treatAsDeclarations, SqlAstCreationState creationState) {
}
@Override
public void applyBaseRestrictions(
Consumer<Predicate> predicateConsumer,
TableGroup tableGroup,
boolean useQualifier,
Map<String, Filter> enabledFilters,
boolean onlyApplyLoadByKeyFilters,
Set<String> treatAsDeclarations,
SqlAstCreationState creationState) {
}
@Override
public boolean hasWhereRestrictions() {
return false;
}
@Override
public void applyWhereRestrictions(Consumer<Predicate> predicateConsumer, TableGroup tableGroup, boolean useQualifier, SqlAstCreationState creationState) {
}
@Override
public String getIdentifierColumnName() {
return "";
}
@Override
public String getTableName() {
return "";
}
@Override
public String selectFragment(String alias, String columnSuffix) {
return "";
}
@Override
public String[] getCollectionPropertyColumnAliases(String propertyName, String string) {
return new String[0];
}
@Override
public EntityPersister getElementPersister() {
return null;
}
}
}
|
googleapis/google-cloud-java | 37,010 | java-discoveryengine/proto-google-cloud-discoveryengine-v1beta/src/main/java/com/google/cloud/discoveryengine/v1beta/ListEvaluationResultsRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1beta/evaluation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1beta;
/**
*
*
* <pre>
* Request message for
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest}
*/
public final class ListEvaluationResultsRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest)
ListEvaluationResultsRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListEvaluationResultsRequest.newBuilder() to construct.
private ListEvaluationResultsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListEvaluationResultsRequest() {
evaluation_ = "";
pageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListEvaluationResultsRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1beta.EvaluationServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_ListEvaluationResultsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1beta.EvaluationServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_ListEvaluationResultsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest.class,
com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest.Builder.class);
}
public static final int EVALUATION_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object evaluation_ = "";
/**
*
*
* <pre>
* Required. The evaluation resource name, such as
* `projects/{project}/locations/{location}/evaluations/{evaluation}`.
*
* If the caller does not have permission to list [EvaluationResult][]
* under this evaluation, regardless of whether or not this evaluation
* set exists, a `PERMISSION_DENIED` error is returned.
* </pre>
*
* <code>
* string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The evaluation.
*/
@java.lang.Override
public java.lang.String getEvaluation() {
java.lang.Object ref = evaluation_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
evaluation_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The evaluation resource name, such as
* `projects/{project}/locations/{location}/evaluations/{evaluation}`.
*
* If the caller does not have permission to list [EvaluationResult][]
* under this evaluation, regardless of whether or not this evaluation
* set exists, a `PERMISSION_DENIED` error is returned.
* </pre>
*
* <code>
* string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for evaluation.
*/
@java.lang.Override
public com.google.protobuf.ByteString getEvaluationBytes() {
java.lang.Object ref = evaluation_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
evaluation_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int PAGE_SIZE_FIELD_NUMBER = 2;
private int pageSize_ = 0;
/**
*
*
* <pre>
* Maximum number of [EvaluationResult][] to return. If unspecified,
* defaults to 100. The maximum allowed value is 1000. Values above 1000 will
* be coerced to 1000.
*
* If this field is negative, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
public static final int PAGE_TOKEN_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
private volatile java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token
* [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListEvaluationResultsResponse.next_page_token],
* received from a previous
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* must match the call that provided the page token. Otherwise, an
* `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
@java.lang.Override
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A page token
* [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListEvaluationResultsResponse.next_page_token],
* received from a previous
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* must match the call that provided the page token. Otherwise, an
* `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(evaluation_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, evaluation_);
}
if (pageSize_ != 0) {
output.writeInt32(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(evaluation_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, evaluation_);
}
if (pageSize_ != 0) {
size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_);
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest other =
(com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest) obj;
if (!getEvaluation().equals(other.getEvaluation())) return false;
if (getPageSize() != other.getPageSize()) return false;
if (!getPageToken().equals(other.getPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + EVALUATION_FIELD_NUMBER;
hash = (53 * hash) + getEvaluation().hashCode();
hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER;
hash = (53 * hash) + getPageSize();
hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest parseFrom(
byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest
parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest
parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Request message for
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* method.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest)
com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequestOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1beta.EvaluationServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_ListEvaluationResultsRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1beta.EvaluationServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_ListEvaluationResultsRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest.class,
com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest.Builder.class);
}
// Construct using
// com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
evaluation_ = "";
pageSize_ = 0;
pageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1beta.EvaluationServiceProto
.internal_static_google_cloud_discoveryengine_v1beta_ListEvaluationResultsRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest
getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest
.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest build() {
com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest buildPartial() {
com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest result =
new com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.evaluation_ = evaluation_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.pageSize_ = pageSize_;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.pageToken_ = pageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest) {
return mergeFrom(
(com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest other) {
if (other
== com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest
.getDefaultInstance()) return this;
if (!other.getEvaluation().isEmpty()) {
evaluation_ = other.evaluation_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.getPageSize() != 0) {
setPageSize(other.getPageSize());
}
if (!other.getPageToken().isEmpty()) {
pageToken_ = other.pageToken_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
evaluation_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 16:
{
pageSize_ = input.readInt32();
bitField0_ |= 0x00000002;
break;
} // case 16
case 26:
{
pageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object evaluation_ = "";
/**
*
*
* <pre>
* Required. The evaluation resource name, such as
* `projects/{project}/locations/{location}/evaluations/{evaluation}`.
*
* If the caller does not have permission to list [EvaluationResult][]
* under this evaluation, regardless of whether or not this evaluation
* set exists, a `PERMISSION_DENIED` error is returned.
* </pre>
*
* <code>
* string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The evaluation.
*/
public java.lang.String getEvaluation() {
java.lang.Object ref = evaluation_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
evaluation_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Required. The evaluation resource name, such as
* `projects/{project}/locations/{location}/evaluations/{evaluation}`.
*
* If the caller does not have permission to list [EvaluationResult][]
* under this evaluation, regardless of whether or not this evaluation
* set exists, a `PERMISSION_DENIED` error is returned.
* </pre>
*
* <code>
* string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for evaluation.
*/
public com.google.protobuf.ByteString getEvaluationBytes() {
java.lang.Object ref = evaluation_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
evaluation_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Required. The evaluation resource name, such as
* `projects/{project}/locations/{location}/evaluations/{evaluation}`.
*
* If the caller does not have permission to list [EvaluationResult][]
* under this evaluation, regardless of whether or not this evaluation
* set exists, a `PERMISSION_DENIED` error is returned.
* </pre>
*
* <code>
* string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The evaluation to set.
* @return This builder for chaining.
*/
public Builder setEvaluation(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
evaluation_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The evaluation resource name, such as
* `projects/{project}/locations/{location}/evaluations/{evaluation}`.
*
* If the caller does not have permission to list [EvaluationResult][]
* under this evaluation, regardless of whether or not this evaluation
* set exists, a `PERMISSION_DENIED` error is returned.
* </pre>
*
* <code>
* string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearEvaluation() {
evaluation_ = getDefaultInstance().getEvaluation();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
*
*
* <pre>
* Required. The evaluation resource name, such as
* `projects/{project}/locations/{location}/evaluations/{evaluation}`.
*
* If the caller does not have permission to list [EvaluationResult][]
* under this evaluation, regardless of whether or not this evaluation
* set exists, a `PERMISSION_DENIED` error is returned.
* </pre>
*
* <code>
* string evaluation = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes for evaluation to set.
* @return This builder for chaining.
*/
public Builder setEvaluationBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
evaluation_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private int pageSize_;
/**
*
*
* <pre>
* Maximum number of [EvaluationResult][] to return. If unspecified,
* defaults to 100. The maximum allowed value is 1000. Values above 1000 will
* be coerced to 1000.
*
* If this field is negative, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return The pageSize.
*/
@java.lang.Override
public int getPageSize() {
return pageSize_;
}
/**
*
*
* <pre>
* Maximum number of [EvaluationResult][] to return. If unspecified,
* defaults to 100. The maximum allowed value is 1000. Values above 1000 will
* be coerced to 1000.
*
* If this field is negative, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @param value The pageSize to set.
* @return This builder for chaining.
*/
public Builder setPageSize(int value) {
pageSize_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Maximum number of [EvaluationResult][] to return. If unspecified,
* defaults to 100. The maximum allowed value is 1000. Values above 1000 will
* be coerced to 1000.
*
* If this field is negative, an `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>int32 page_size = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageSize() {
bitField0_ = (bitField0_ & ~0x00000002);
pageSize_ = 0;
onChanged();
return this;
}
private java.lang.Object pageToken_ = "";
/**
*
*
* <pre>
* A page token
* [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListEvaluationResultsResponse.next_page_token],
* received from a previous
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* must match the call that provided the page token. Otherwise, an
* `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The pageToken.
*/
public java.lang.String getPageToken() {
java.lang.Object ref = pageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
pageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A page token
* [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListEvaluationResultsResponse.next_page_token],
* received from a previous
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* must match the call that provided the page token. Otherwise, an
* `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return The bytes for pageToken.
*/
public com.google.protobuf.ByteString getPageTokenBytes() {
java.lang.Object ref = pageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
pageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A page token
* [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListEvaluationResultsResponse.next_page_token],
* received from a previous
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* must match the call that provided the page token. Otherwise, an
* `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* A page token
* [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListEvaluationResultsResponse.next_page_token],
* received from a previous
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* must match the call that provided the page token. Otherwise, an
* `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearPageToken() {
pageToken_ = getDefaultInstance().getPageToken();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
*
*
* <pre>
* A page token
* [ListEvaluationResultsResponse.next_page_token][google.cloud.discoveryengine.v1beta.ListEvaluationResultsResponse.next_page_token],
* received from a previous
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* call. Provide this to retrieve the subsequent page.
*
* When paginating, all other parameters provided to
* [EvaluationService.ListEvaluationResults][google.cloud.discoveryengine.v1beta.EvaluationService.ListEvaluationResults]
* must match the call that provided the page token. Otherwise, an
* `INVALID_ARGUMENT` error is returned.
* </pre>
*
* <code>string page_token = 3;</code>
*
* @param value The bytes for pageToken to set.
* @return This builder for chaining.
*/
public Builder setPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
pageToken_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest)
private static final com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest();
}
public static com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListEvaluationResultsRequest> PARSER =
new com.google.protobuf.AbstractParser<ListEvaluationResultsRequest>() {
@java.lang.Override
public ListEvaluationResultsRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListEvaluationResultsRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListEvaluationResultsRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1beta.ListEvaluationResultsRequest
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/ozone | 37,293 | hadoop-hdds/container-service/src/test/java/org/apache/hadoop/ozone/container/ozoneimpl/TestContainerReader.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.ozone.container.ozoneimpl;
import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerDataProto.State.DELETED;
import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerDataProto.State.RECOVERING;
import static org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos.ContainerDataProto.State.UNHEALTHY;
import static org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeTestUtils.verifyAllDataChecksumsMatch;
import static org.apache.hadoop.ozone.container.common.ContainerTestUtils.createDbInstancesForTestIfNeeded;
import static org.apache.hadoop.ozone.container.common.ContainerTestUtils.getKeyValueHandler;
import static org.apache.hadoop.ozone.container.common.impl.ContainerImplTestUtils.newContainerSet;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNotSame;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.anyList;
import static org.mockito.Mockito.anyLong;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.io.File;
import java.io.IOException;
import java.nio.file.Files;
import java.nio.file.Path;
import java.nio.file.Paths;
import java.util.ArrayList;
import java.util.List;
import java.util.UUID;
import org.apache.hadoop.conf.StorageUnit;
import org.apache.hadoop.hdds.client.BlockID;
import org.apache.hadoop.hdds.conf.OzoneConfiguration;
import org.apache.hadoop.hdds.protocol.datanode.proto.ContainerProtos;
import org.apache.hadoop.hdds.scm.ScmConfigKeys;
import org.apache.hadoop.hdds.scm.container.ContainerID;
import org.apache.hadoop.hdds.utils.db.InMemoryTestTable;
import org.apache.hadoop.hdds.utils.db.Table;
import org.apache.hadoop.ozone.OzoneConfigKeys;
import org.apache.hadoop.ozone.OzoneConsts;
import org.apache.hadoop.ozone.container.checksum.ContainerChecksumTreeManager;
import org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeTestUtils;
import org.apache.hadoop.ozone.container.checksum.ContainerMerkleTreeWriter;
import org.apache.hadoop.ozone.container.common.helpers.BlockData;
import org.apache.hadoop.ozone.container.common.helpers.ChunkInfo;
import org.apache.hadoop.ozone.container.common.impl.ContainerLayoutVersion;
import org.apache.hadoop.ozone.container.common.impl.ContainerSet;
import org.apache.hadoop.ozone.container.common.interfaces.Container;
import org.apache.hadoop.ozone.container.common.interfaces.DBHandle;
import org.apache.hadoop.ozone.container.common.interfaces.VolumeChoosingPolicy;
import org.apache.hadoop.ozone.container.common.utils.ContainerCache;
import org.apache.hadoop.ozone.container.common.utils.StorageVolumeUtil;
import org.apache.hadoop.ozone.container.common.volume.HddsVolume;
import org.apache.hadoop.ozone.container.common.volume.MutableVolumeSet;
import org.apache.hadoop.ozone.container.common.volume.RoundRobinVolumeChoosingPolicy;
import org.apache.hadoop.ozone.container.common.volume.StorageVolume;
import org.apache.hadoop.ozone.container.common.volume.VolumeSet;
import org.apache.hadoop.ozone.container.keyvalue.ContainerTestVersionInfo;
import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainer;
import org.apache.hadoop.ozone.container.keyvalue.KeyValueContainerData;
import org.apache.hadoop.ozone.container.keyvalue.KeyValueHandler;
import org.apache.hadoop.ozone.container.keyvalue.helpers.BlockUtils;
import org.apache.hadoop.ozone.container.metadata.ContainerCreateInfo;
import org.apache.hadoop.ozone.container.metadata.DatanodeStoreSchemaThreeImpl;
import org.apache.hadoop.ozone.container.metadata.WitnessedContainerMetadataStore;
import org.apache.hadoop.util.Time;
import org.apache.ozone.test.GenericTestUtils.LogCapturer;
import org.apache.ratis.util.FileUtils;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.io.TempDir;
/**
* Test ContainerReader class which loads containers from disks.
*/
public class TestContainerReader {
private MutableVolumeSet volumeSet;
private HddsVolume hddsVolume;
private ContainerSet containerSet;
private WitnessedContainerMetadataStore mockMetadataStore;
private OzoneConfiguration conf;
private RoundRobinVolumeChoosingPolicy volumeChoosingPolicy;
private UUID datanodeId;
private String clusterId = UUID.randomUUID().toString();
private int blockCount = 10;
private long blockLen = 1024;
private ContainerLayoutVersion layout;
private KeyValueHandler keyValueHandler;
@TempDir
private Path tempDir;
private void setup(ContainerTestVersionInfo versionInfo) throws Exception {
setLayoutAndSchemaVersion(versionInfo);
File volumeDir =
Files.createDirectory(tempDir.resolve("volumeDir")).toFile();
this.conf = new OzoneConfiguration();
volumeSet = mock(MutableVolumeSet.class);
mockMetadataStore = mock(WitnessedContainerMetadataStore.class);
when(mockMetadataStore.getContainerCreateInfoTable()).thenReturn(new InMemoryTestTable<>());
containerSet = newContainerSet(1000, mockMetadataStore);
datanodeId = UUID.randomUUID();
hddsVolume = new HddsVolume.Builder(volumeDir
.getAbsolutePath()).conf(conf).datanodeUuid(datanodeId
.toString()).clusterID(clusterId).build();
StorageVolumeUtil.checkVolume(hddsVolume, clusterId, clusterId, conf,
null, null);
volumeSet = mock(MutableVolumeSet.class);
volumeChoosingPolicy = mock(RoundRobinVolumeChoosingPolicy.class);
when(volumeChoosingPolicy.chooseVolume(anyList(), anyLong()))
.thenReturn(hddsVolume);
for (int i = 0; i < 2; i++) {
KeyValueContainerData keyValueContainerData =
new KeyValueContainerData(i, layout, (long) StorageUnit.GB.toBytes(5),
UUID.randomUUID().toString(), datanodeId.toString());
KeyValueContainer keyValueContainer =
new KeyValueContainer(keyValueContainerData,
conf);
keyValueContainer.create(volumeSet, volumeChoosingPolicy, clusterId);
List<Long> blkNames;
if (i % 2 == 0) {
blkNames = addBlocks(keyValueContainer, true);
markBlocksForDelete(keyValueContainer, true, blkNames, i);
} else {
blkNames = addBlocks(keyValueContainer, false);
markBlocksForDelete(keyValueContainer, false, blkNames, i);
}
}
// Close the RocksDB instance for this container and remove from the cache
// so it does not affect the ContainerReader, which avoids using the cache
// at startup for performance reasons.
ContainerCache.getInstance(conf).shutdownCache();
keyValueHandler = getKeyValueHandler(conf, UUID.randomUUID().toString(), containerSet, volumeSet);
}
@AfterEach
public void cleanup() {
BlockUtils.shutdownCache(conf);
}
private void markBlocksForDelete(KeyValueContainer keyValueContainer,
boolean setMetaData, List<Long> blockNames, int count) throws Exception {
KeyValueContainerData cData = keyValueContainer.getContainerData();
try (DBHandle metadataStore = BlockUtils.getDB(cData, conf)) {
for (int i = 0; i < count; i++) {
Table<String, BlockData> blockDataTable =
metadataStore.getStore().getBlockDataTable();
Long localID = blockNames.get(i);
String blk = cData.getBlockKey(localID);
BlockData blkInfo = blockDataTable.get(blk);
blockDataTable.delete(blk);
blockDataTable.put(cData.getDeletingBlockKey(localID), blkInfo);
}
if (setMetaData) {
// Pending delete blocks are still counted towards the block count
// and bytes used metadata values, so those do not change.
Table<String, Long> metadataTable =
metadataStore.getStore().getMetadataTable();
metadataTable.put(cData.getPendingDeleteBlockCountKey(),
(long)count);
}
}
}
private List<Long> addBlocks(KeyValueContainer keyValueContainer,
boolean setMetaData) throws Exception {
long containerId = keyValueContainer.getContainerData().getContainerID();
KeyValueContainerData cData = keyValueContainer.getContainerData();
List<Long> blkNames = new ArrayList<>();
try (DBHandle metadataStore = BlockUtils.getDB(cData, conf)) {
for (int i = 0; i < blockCount; i++) {
// Creating BlockData
BlockID blockID = new BlockID(containerId, i);
BlockData blockData = new BlockData(blockID);
blockData.addMetadata(OzoneConsts.VOLUME, OzoneConsts.OZONE);
blockData.addMetadata(OzoneConsts.OWNER,
OzoneConsts.OZONE_SIMPLE_HDFS_USER);
List<ContainerProtos.ChunkInfo> chunkList = new ArrayList<>();
long localBlockID = blockID.getLocalID();
ChunkInfo info = new ChunkInfo(String.format(
"%d.data.%d", localBlockID, 0), 0, blockLen);
chunkList.add(info.getProtoBufMessage());
blockData.setChunks(chunkList);
blkNames.add(localBlockID);
metadataStore.getStore().getBlockDataTable()
.put(cData.getBlockKey(localBlockID), blockData);
}
if (setMetaData) {
metadataStore.getStore().getMetadataTable()
.put(cData.getBlockCountKey(), (long) blockCount);
metadataStore.getStore().getMetadataTable()
.put(cData.getBytesUsedKey(), blockCount * blockLen);
}
}
return blkNames;
}
@ContainerTestVersionInfo.ContainerTest
public void testContainerReader(ContainerTestVersionInfo versionInfo)
throws Exception {
setLayoutAndSchemaVersion(versionInfo);
setup(versionInfo);
ContainerReader containerReader = new ContainerReader(volumeSet,
hddsVolume, containerSet, conf, true);
Thread thread = new Thread(containerReader);
thread.start();
thread.join();
long originalCommittedBytes = hddsVolume.getCommittedBytes();
ContainerCache.getInstance(conf).shutdownCache();
long recoveringContainerId = 10;
KeyValueContainerData recoveringContainerData = new KeyValueContainerData(
recoveringContainerId, layout, (long) StorageUnit.GB.toBytes(5),
UUID.randomUUID().toString(), datanodeId.toString());
//create a container with recovering state
recoveringContainerData.setState(RECOVERING);
KeyValueContainer recoveringKeyValueContainer =
new KeyValueContainer(recoveringContainerData,
conf);
recoveringKeyValueContainer.create(
volumeSet, volumeChoosingPolicy, clusterId);
thread = new Thread(containerReader);
thread.start();
thread.join();
// no change, only open containers have committed space
assertEquals(originalCommittedBytes, hddsVolume.getCommittedBytes());
// Ratis replicated recovering containers are deleted upon datanode startup
if (recoveringKeyValueContainer.getContainerData().getReplicaIndex() == 0) {
assertNull(containerSet.getContainer(recoveringContainerData.getContainerID()));
assertEquals(2, containerSet.containerCount());
} else {
//recovering container should be marked unhealthy, so the count should be 3
assertEquals(UNHEALTHY, containerSet.getContainer(
recoveringContainerData.getContainerID()).getContainerState());
assertEquals(3, containerSet.containerCount());
}
for (int i = 0; i < 2; i++) {
Container keyValueContainer = containerSet.getContainer(i);
KeyValueContainerData keyValueContainerData = (KeyValueContainerData)
keyValueContainer.getContainerData();
// Verify block related metadata.
assertEquals(blockCount,
keyValueContainerData.getBlockCount());
assertEquals(blockCount * blockLen,
keyValueContainerData.getBytesUsed());
assertEquals(i,
keyValueContainerData.getNumPendingDeletionBlocks());
assertTrue(keyValueContainerData.isCommittedSpace());
}
}
@ContainerTestVersionInfo.ContainerTest
public void testContainerReaderWithLoadException(
ContainerTestVersionInfo versionInfo) throws Exception {
setLayoutAndSchemaVersion(versionInfo);
setup(versionInfo);
MutableVolumeSet volumeSet1;
HddsVolume hddsVolume1;
ContainerSet containerSet1 = newContainerSet();
File volumeDir1 =
Files.createDirectory(tempDir.resolve("volumeDir" + 1)).toFile();
RoundRobinVolumeChoosingPolicy volumeChoosingPolicy1;
volumeSet1 = mock(MutableVolumeSet.class);
UUID datanode = UUID.randomUUID();
hddsVolume1 = new HddsVolume.Builder(volumeDir1
.getAbsolutePath()).conf(conf).datanodeUuid(datanode
.toString()).clusterID(clusterId).build();
StorageVolumeUtil.checkVolume(hddsVolume1, clusterId, clusterId, conf,
null, null);
volumeChoosingPolicy1 = mock(RoundRobinVolumeChoosingPolicy.class);
when(volumeChoosingPolicy1.chooseVolume(anyList(), anyLong()))
.thenReturn(hddsVolume1);
int containerCount = 3;
for (int i = 0; i < containerCount; i++) {
KeyValueContainerData keyValueContainerData = new KeyValueContainerData(i,
layout,
(long) StorageUnit.GB.toBytes(5), UUID.randomUUID().toString(),
datanodeId.toString());
KeyValueContainer keyValueContainer =
new KeyValueContainer(keyValueContainerData, conf);
keyValueContainer.create(volumeSet1, volumeChoosingPolicy1, clusterId);
if (i == 0) {
// rename first container directory name
String containerPathStr =
keyValueContainer.getContainerData().getContainerPath();
File containerPath = new File(containerPathStr);
String renamePath = containerPathStr + "-aa";
assertTrue(containerPath.renameTo(new File(renamePath)));
}
}
ContainerCache.getInstance(conf).shutdownCache();
ContainerReader containerReader = new ContainerReader(volumeSet1,
hddsVolume1, containerSet1, conf, true);
containerReader.readVolume(hddsVolume1.getHddsRootDir());
assertEquals(containerCount - 1, containerSet1.containerCount());
for (Container c : containerSet1.getContainerMap().values()) {
if (c.getContainerData().getContainerID() == 0) {
assertFalse(c.getContainerData().isCommittedSpace());
} else {
assertTrue(c.getContainerData().isCommittedSpace());
}
}
assertEquals(hddsVolume1.getCommittedBytes(), (containerCount - 1) * StorageUnit.GB.toBytes(5));
}
  @ContainerTestVersionInfo.ContainerTest
  public void testContainerReaderWithInvalidDbPath(
      ContainerTestVersionInfo versionInfo) throws Exception {
    // Verifies that containers whose RocksDB files have been deleted are
    // skipped by ContainerReader: none are loaded, no space is committed on
    // the volume, and a diagnostic message is logged for the missing DB.
    setLayoutAndSchemaVersion(versionInfo);
    setup(versionInfo);
    MutableVolumeSet volumeSet1;
    HddsVolume hddsVolume1;
    ContainerSet containerSet1 = newContainerSet();
    File volumeDir1 =
        Files.createDirectory(tempDir.resolve("volumeDirDbDelete")).toFile();
    RoundRobinVolumeChoosingPolicy volumeChoosingPolicy1;
    volumeSet1 = mock(MutableVolumeSet.class);
    UUID datanode = UUID.randomUUID();
    hddsVolume1 = new HddsVolume.Builder(volumeDir1
        .getAbsolutePath()).conf(conf).datanodeUuid(datanode
        .toString()).clusterID(clusterId).build();
    // Format the volume so it can host container data for this cluster.
    StorageVolumeUtil.checkVolume(hddsVolume1, clusterId, clusterId, conf,
        null, null);
    // The mocked policy always picks the single test volume.
    volumeChoosingPolicy1 = mock(RoundRobinVolumeChoosingPolicy.class);
    when(volumeChoosingPolicy1.chooseVolume(anyList(), anyLong()))
        .thenReturn(hddsVolume1);
    List<File> dbPathList = new ArrayList<>();
    int containerCount = 3;
    for (int i = 0; i < containerCount; i++) {
      KeyValueContainerData keyValueContainerData = new KeyValueContainerData(i,
          layout,
          (long) StorageUnit.GB.toBytes(5), UUID.randomUUID().toString(),
          datanodeId.toString());
      KeyValueContainer keyValueContainer =
          new KeyValueContainer(keyValueContainerData, conf);
      keyValueContainer.create(volumeSet1, volumeChoosingPolicy1, clusterId);
      // Remember each container's DB location so it can be deleted below.
      dbPathList.add(keyValueContainerData.getDbFile());
    }
    // Close cached RocksDB handles before removing the DB files on disk.
    ContainerCache.getInstance(conf).shutdownCache();
    for (File dbPath : dbPathList) {
      FileUtils.deleteFully(dbPath.toPath());
    }
    LogCapturer dnLogs = LogCapturer.captureLogs(ContainerReader.class);
    dnLogs.clearOutput();
    ContainerReader containerReader = new ContainerReader(volumeSet1,
        hddsVolume1, containerSet1, conf, true);
    containerReader.readVolume(hddsVolume1.getHddsRootDir());
    // Nothing should have been loaded and no space committed.
    assertEquals(0, containerSet1.containerCount());
    assertEquals(0, hddsVolume1.getCommittedBytes());
    assertThat(dnLogs.getOutput()).contains("Container DB file is missing");
  }
@SuppressWarnings("checkstyle:MethodLength")
@ContainerTestVersionInfo.ContainerTest
public void testMultipleContainerReader(ContainerTestVersionInfo versionInfo)
throws Exception {
setLayoutAndSchemaVersion(versionInfo);
setup(versionInfo);
final int volumeNum = 10;
StringBuffer datanodeDirs = new StringBuffer();
File[] volumeDirs = new File[volumeNum];
for (int i = 0; i < volumeNum; i++) {
volumeDirs[i] =
Files.createDirectory(tempDir.resolve("volumeDir" + i)).toFile();
datanodeDirs = datanodeDirs.append(volumeDirs[i]).append(',');
}
BlockUtils.shutdownCache(conf);
conf.set(ScmConfigKeys.HDDS_DATANODE_DIR_KEY,
datanodeDirs.toString());
conf.set(OzoneConfigKeys.HDDS_CONTAINER_RATIS_DATANODE_STORAGE_DIR,
datanodeDirs.toString());
MutableVolumeSet volumeSets =
new MutableVolumeSet(datanodeId.toString(), clusterId, conf, null,
StorageVolume.VolumeType.DATA_VOLUME, null);
for (StorageVolume v : volumeSets.getVolumesList()) {
StorageVolumeUtil.checkVolume(v, clusterId, clusterId, conf,
null, null);
}
createDbInstancesForTestIfNeeded(volumeSets, clusterId, clusterId, conf);
ContainerCache cache = ContainerCache.getInstance(conf);
cache.shutdownCache();
RoundRobinVolumeChoosingPolicy policy =
new RoundRobinVolumeChoosingPolicy();
final int containerCount = 100;
blockCount = containerCount;
KeyValueContainer conflict01 = null;
KeyValueContainer conflict02 = null;
KeyValueContainer conflict11 = null;
KeyValueContainer conflict12 = null;
KeyValueContainer conflict21 = null;
KeyValueContainer conflict22 = null;
KeyValueContainer ec1 = null;
KeyValueContainer ec2 = null;
KeyValueContainer ec3 = null;
KeyValueContainer ec4 = null;
KeyValueContainer ec5 = null;
KeyValueContainer ec6 = null;
KeyValueContainer ec7 = null;
long baseBCSID = 10L;
for (int i = 0; i < containerCount; i++) {
if (i == 0) {
// Create a duplicate container with ID 0. Both have the same BSCID
conflict01 =
createContainerWithId(0, volumeSets, policy, baseBCSID, 0);
conflict02 =
createContainerWithId(0, volumeSets, policy, baseBCSID, 0);
} else if (i == 1) {
// Create a duplicate container with ID 1 so that the one has a
// larger BCSID
conflict11 =
createContainerWithId(1, volumeSets, policy, baseBCSID, 0);
conflict12 = createContainerWithId(
1, volumeSets, policy, baseBCSID - 1, 0);
} else if (i == 2) {
conflict21 =
createContainerWithId(i, volumeSets, policy, baseBCSID, 0);
conflict22 =
createContainerWithId(i, volumeSets, policy, baseBCSID, 0);
conflict22.close();
} else if (i == 3) {
ec1 = createContainerWithId(i, volumeSets, policy, baseBCSID, 1);
ec2 = createContainerWithId(i, volumeSets, policy, baseBCSID, 1);
} else if (i == 4) {
ec3 = createContainerWithId(i, volumeSets, policy, baseBCSID, 1);
ec4 = createContainerWithId(i, volumeSets, policy, baseBCSID, 2);
ec3.close();
ec4.close();
mockMetadataStore.getContainerCreateInfoTable().put(ContainerID.valueOf(i), ContainerCreateInfo.valueOf(
ContainerProtos.ContainerDataProto.State.CLOSED, 1));
} else if (i == 5) {
ec5 = createContainerWithId(i, volumeSets, policy, baseBCSID, 1);
ec6 = createContainerWithId(i, volumeSets, policy, baseBCSID, 2);
ec6.close();
ec5.close();
mockMetadataStore.getContainerCreateInfoTable().put(ContainerID.valueOf(i), ContainerCreateInfo.valueOf(
ContainerProtos.ContainerDataProto.State.CLOSED, 2));
} else if (i == 6) {
ec7 = createContainerWithId(i, volumeSets, policy, baseBCSID, 3);
ec7.close();
mockMetadataStore.getContainerCreateInfoTable().put(ContainerID.valueOf(i), ContainerCreateInfo.valueOf(
ContainerProtos.ContainerDataProto.State.CLOSED, -1));
} else {
createContainerWithId(i, volumeSets, policy, baseBCSID, 0);
}
}
// Close the RocksDB instance for this container and remove from the cache
// so it does not affect the ContainerReader, which avoids using the cache
// at startup for performance reasons.
cache.shutdownCache();
List<StorageVolume> volumes = volumeSets.getVolumesList();
ContainerReader[] containerReaders = new ContainerReader[volumeNum];
Thread[] threads = new Thread[volumeNum];
for (int i = 0; i < volumeNum; i++) {
containerReaders[i] = new ContainerReader(volumeSets,
(HddsVolume) volumes.get(i), containerSet, conf, true);
threads[i] = new Thread(containerReaders[i]);
}
long startTime = Time.monotonicNow();
for (int i = 0; i < volumeNum; i++) {
threads[i].start();
}
for (int i = 0; i < volumeNum; i++) {
threads[i].join();
}
System.out.println("Open " + volumeNum + " Volume with " + containerCount +
" costs " + (Time.monotonicNow() - startTime) / 1000 + "s");
assertEquals(containerCount,
containerSet.getContainerMap().entrySet().size());
assertEquals(volumeSet.getFailedVolumesList().size(), 0);
// One of the conflict01 or conflict02 should have had its container path
// removed.
List<Path> paths = new ArrayList<>();
paths.add(Paths.get(conflict01.getContainerData().getContainerPath()));
paths.add(Paths.get(conflict02.getContainerData().getContainerPath()));
int exist = 0;
for (Path p : paths) {
if (Files.exists(p)) {
exist++;
}
}
assertEquals(1, exist);
assertThat(paths).contains(Paths.get(
containerSet.getContainer(0).getContainerData().getContainerPath()));
// For conflict1, the one with the larger BCSID should win, which is
// conflict11.
assertFalse(Files.exists(Paths.get(
conflict12.getContainerData().getContainerPath())));
assertEquals(conflict11.getContainerData().getContainerPath(),
containerSet.getContainer(1).getContainerData().getContainerPath());
assertEquals(baseBCSID, containerSet.getContainer(1)
.getContainerData().getBlockCommitSequenceId());
// For conflict2, the closed on (conflict22) should win.
assertFalse(Files.exists(Paths.get(
conflict21.getContainerData().getContainerPath())));
assertEquals(conflict22.getContainerData().getContainerPath(),
containerSet.getContainer(2).getContainerData().getContainerPath());
assertEquals(ContainerProtos.ContainerDataProto.State.CLOSED,
containerSet.getContainer(2).getContainerData().getState());
// For the EC conflict, both containers should be left on disk
assertTrue(Files.exists(Paths.get(ec1.getContainerData().getContainerPath())));
assertTrue(Files.exists(Paths.get(ec2.getContainerData().getContainerPath())));
assertNotNull(containerSet.getContainer(3));
// For EC conflict with different replica index, all container present but containerSet loaded with same
// replica index as the one in DB.
assertTrue(Files.exists(Paths.get(ec3.getContainerData().getContainerPath())));
assertTrue(Files.exists(Paths.get(ec4.getContainerData().getContainerPath())));
assertEquals(containerSet.getContainer(ec3.getContainerData().getContainerID()).getContainerData()
.getReplicaIndex(), ec3.getContainerData().getReplicaIndex());
assertTrue(Files.exists(Paths.get(ec5.getContainerData().getContainerPath())));
assertTrue(Files.exists(Paths.get(ec6.getContainerData().getContainerPath())));
assertEquals(containerSet.getContainer(ec6.getContainerData().getContainerID()).getContainerData()
.getReplicaIndex(), ec6.getContainerData().getReplicaIndex());
// for EC container whose entry in DB with replica index -1, is allowed to be loaded
assertTrue(Files.exists(Paths.get(ec7.getContainerData().getContainerPath())));
assertEquals(3, mockMetadataStore.getContainerCreateInfoTable().get(
ContainerID.valueOf(ec7.getContainerData().getContainerID())).getReplicaIndex());
// There should be no open containers cached by the ContainerReader as it
// opens and closed them avoiding the cache.
assertEquals(0, cache.size());
}
private KeyValueContainer createContainerWithId(int id, VolumeSet volSet,
VolumeChoosingPolicy policy, long bcsid, int replicaIndex)
throws Exception {
KeyValueContainerData keyValueContainerData =
new KeyValueContainerData(id, layout,
(long) StorageUnit.GB.toBytes(5), UUID.randomUUID().toString(),
datanodeId.toString());
keyValueContainerData.setReplicaIndex(replicaIndex);
KeyValueContainer keyValueContainer =
new KeyValueContainer(keyValueContainerData,
conf);
keyValueContainer.create(volSet, policy, clusterId);
List<Long> blkNames;
if (id % 2 == 0) {
blkNames = addBlocks(keyValueContainer, true);
markBlocksForDelete(keyValueContainer, true, blkNames, id);
} else {
blkNames = addBlocks(keyValueContainer, false);
markBlocksForDelete(keyValueContainer, false, blkNames, id);
}
setBlockCommitSequence(keyValueContainerData, bcsid);
return keyValueContainer;
}
private void setBlockCommitSequence(KeyValueContainerData cData, long val)
throws IOException {
try (DBHandle metadataStore = BlockUtils.getDB(cData, conf)) {
metadataStore.getStore().getMetadataTable()
.put(cData.getBcsIdKey(), val);
metadataStore.getStore().flushDB();
}
cData.updateBlockCommitSequenceId(val);
}
  @ContainerTestVersionInfo.ContainerTest
  public void testMarkedDeletedContainerCleared(
      ContainerTestVersionInfo versionInfo) throws Exception {
    // A container persisted in DELETED state must be cleaned up during
    // startup: its leftovers removed from the volume's deleted-container
    // tmp dir, the container never added to the ContainerSet, and (schema
    // V3 only) its metadata keys removed from the shared RocksDB instance.
    setLayoutAndSchemaVersion(versionInfo);
    setup(versionInfo);
    KeyValueContainerData containerData = new KeyValueContainerData(
        101, layout, (long) StorageUnit.GB.toBytes(5),
        UUID.randomUUID().toString(), datanodeId.toString());
    //create a container with deleted state
    containerData.setState(DELETED);
    KeyValueContainer kvContainer =
        new KeyValueContainer(containerData, conf);
    kvContainer.create(
        volumeSet, volumeChoosingPolicy, clusterId);
    long baseCount = 0;
    if (containerData.hasSchema(OzoneConsts.SCHEMA_V3)) {
      // add db entry for the container ID 101 for V3
      baseCount = addDbEntry(containerData);
    }
    // verify container data and perform cleanup
    ContainerReader containerReader = new ContainerReader(volumeSet,
        hddsVolume, containerSet, conf, true);
    containerReader.run();
    // assert that tmp dir is empty
    File[] leftoverContainers =
        hddsVolume.getDeletedContainerDir().listFiles();
    assertNotNull(leftoverContainers);
    assertEquals(0, leftoverContainers.length);
    assertNull(containerSet.getContainer(101));
    if (containerData.hasSchema(OzoneConsts.SCHEMA_V3)) {
      // verify if newly added container is not present as added
      try (DBHandle dbHandle = BlockUtils.getDB(
          kvContainer.getContainerData(), conf)) {
        DatanodeStoreSchemaThreeImpl store = (DatanodeStoreSchemaThreeImpl)
            dbHandle.getStore();
        // Key count back at the baseline => the three entries written by
        // addDbEntry were removed by the cleanup.
        assertEquals(baseCount, store.getMetadataTable()
            .getEstimatedKeyCount());
      }
    }
  }
  @ContainerTestVersionInfo.ContainerTest
  public void testContainerLoadingWithMerkleTreePresent(ContainerTestVersionInfo versionInfo)
      throws Exception {
    // When a container checksum (merkle) tree exists, reloading the
    // container must pick up the data checksum recorded in that tree.
    setLayoutAndSchemaVersion(versionInfo);
    setup(versionInfo);
    // Create a container with blocks and write MerkleTree
    KeyValueContainer container = createContainer(10L);
    KeyValueContainerData containerData = container.getContainerData();
    ContainerMerkleTreeWriter treeWriter = ContainerMerkleTreeTestUtils.buildTestTree(conf);
    ContainerChecksumTreeManager checksumManager = keyValueHandler.getChecksumManager();
    keyValueHandler.updateContainerChecksum(container, treeWriter);
    long expectedDataChecksum = checksumManager.read(containerData).getContainerMerkleTree().getDataChecksum();
    // Test container loading
    ContainerCache.getInstance(conf).shutdownCache();
    ContainerReader containerReader = new ContainerReader(volumeSet, hddsVolume, containerSet, conf, true);
    containerReader.run();
    // Verify container was loaded successfully and data checksum is set
    Container<?> loadedContainer = containerSet.getContainer(10L);
    assertNotNull(loadedContainer);
    KeyValueContainerData loadedData = (KeyValueContainerData) loadedContainer.getContainerData();
    // The reader must build a fresh ContainerData, not reuse our instance.
    assertNotSame(containerData, loadedData);
    assertEquals(expectedDataChecksum, loadedData.getDataChecksum());
    verifyAllDataChecksumsMatch(loadedData, conf);
  }
  @ContainerTestVersionInfo.ContainerTest
  public void testContainerLoadingWithMerkleTreeFallbackToRocksDB(ContainerTestVersionInfo versionInfo)
      throws Exception {
    // When the checksum exists only in the merkle-tree file (not yet in
    // RocksDB), loading must read it from the tree and then persist it to
    // RocksDB as a fallback copy.
    setLayoutAndSchemaVersion(versionInfo);
    setup(versionInfo);
    KeyValueContainer container = createContainer(11L);
    KeyValueContainerData containerData = container.getContainerData();
    ContainerMerkleTreeWriter treeWriter = ContainerMerkleTreeTestUtils.buildTestTree(conf);
    ContainerChecksumTreeManager checksumManager = new ContainerChecksumTreeManager(conf);
    ContainerProtos.ContainerChecksumInfo checksumInfo = checksumManager.updateTree(containerData, treeWriter);
    long dataChecksum = checksumInfo.getContainerMerkleTree().getDataChecksum();
    // Verify no checksum in RocksDB initially
    try (DBHandle dbHandle = BlockUtils.getDB(containerData, conf)) {
      Long dbDataChecksum = dbHandle.getStore().getMetadataTable().get(containerData.getContainerDataChecksumKey());
      assertNull(dbDataChecksum);
    }
    ContainerCache.getInstance(conf).shutdownCache();
    // Test container loading - should read from MerkleTree and store in RocksDB
    ContainerReader containerReader = new ContainerReader(volumeSet, hddsVolume, containerSet, conf, true);
    containerReader.run();
    // Verify container uses checksum from MerkleTree
    Container<?> loadedContainer = containerSet.getContainer(11L);
    assertNotNull(loadedContainer);
    KeyValueContainerData loadedData = (KeyValueContainerData) loadedContainer.getContainerData();
    assertNotSame(containerData, loadedData);
    assertEquals(dataChecksum, loadedData.getDataChecksum());
    // Verify checksum was stored in RocksDB as fallback
    verifyAllDataChecksumsMatch(loadedData, conf);
  }
  @ContainerTestVersionInfo.ContainerTest
  public void testContainerLoadingWithNoChecksumAnywhere(ContainerTestVersionInfo versionInfo)
      throws Exception {
    // With no checksum in RocksDB and no checksum file on disk, loading
    // must succeed and the data checksum must default to 0.
    setLayoutAndSchemaVersion(versionInfo);
    setup(versionInfo);
    KeyValueContainer container = createContainer(12L);
    KeyValueContainerData containerData = container.getContainerData();
    // Verify no checksum in RocksDB
    try (DBHandle dbHandle = BlockUtils.getDB(containerData, conf)) {
      Long dbDataChecksum = dbHandle.getStore().getMetadataTable().get(containerData.getContainerDataChecksumKey());
      assertNull(dbDataChecksum);
    }
    File checksumFile = ContainerChecksumTreeManager.getContainerChecksumFile(containerData);
    assertFalse(checksumFile.exists());
    // Test container loading - should default to 0
    ContainerCache.getInstance(conf).shutdownCache();
    ContainerReader containerReader = new ContainerReader(volumeSet, hddsVolume, containerSet, conf, true);
    containerReader.run();
    // Verify container loads with default checksum of 0
    Container<?> loadedContainer = containerSet.getContainer(12L);
    assertNotNull(loadedContainer);
    KeyValueContainerData loadedData = (KeyValueContainerData) loadedContainer.getContainerData();
    assertNotSame(containerData, loadedData);
    assertEquals(0L, loadedData.getDataChecksum());
    // The checksum is not stored in rocksDB as the checksum file doesn't exist.
    verifyAllDataChecksumsMatch(loadedData, conf);
  }
  @ContainerTestVersionInfo.ContainerTest
  public void testContainerLoadingWithoutMerkleTree(ContainerTestVersionInfo versionInfo)
      throws Exception {
    // A checksum file that exists but carries no container merkle tree
    // (written from an empty tree writer) must not break loading; the data
    // checksum defaults to 0 in that case.
    setLayoutAndSchemaVersion(versionInfo);
    setup(versionInfo);
    KeyValueContainer container = createContainer(13L);
    KeyValueContainerData containerData = container.getContainerData();
    ContainerMerkleTreeWriter treeWriter = new ContainerMerkleTreeWriter();
    keyValueHandler.updateContainerChecksum(container, treeWriter);
    // Create an empty checksum file that exists but has no valid merkle tree
    assertTrue(ContainerChecksumTreeManager.getContainerChecksumFile(containerData).exists());
    // Verify no checksum in RocksDB initially
    try (DBHandle dbHandle = BlockUtils.getDB(containerData, conf)) {
      Long dbDataChecksum = dbHandle.getStore().getMetadataTable().get(containerData.getContainerDataChecksumKey());
      assertNull(dbDataChecksum);
    }
    ContainerCache.getInstance(conf).shutdownCache();
    // Test container loading - should handle when checksum file is present without the container merkle tree and
    // default to 0.
    ContainerReader containerReader = new ContainerReader(volumeSet, hddsVolume, containerSet, conf, true);
    containerReader.run();
    // Verify container loads with default checksum of 0 when checksum file doesn't have merkle tree
    Container<?> loadedContainer = containerSet.getContainer(13L);
    assertNotNull(loadedContainer);
    KeyValueContainerData loadedData = (KeyValueContainerData) loadedContainer.getContainerData();
    assertNotSame(containerData, loadedData);
    assertEquals(0L, loadedData.getDataChecksum());
    verifyAllDataChecksumsMatch(loadedData, conf);
  }
private KeyValueContainer createContainer(long containerId) throws Exception {
KeyValueContainerData containerData = new KeyValueContainerData(containerId, layout,
(long) StorageUnit.GB.toBytes(5), UUID.randomUUID().toString(), datanodeId.toString());
containerData.setState(ContainerProtos.ContainerDataProto.State.CLOSED);
KeyValueContainer container = new KeyValueContainer(containerData, conf);
container.create(volumeSet, volumeChoosingPolicy, clusterId);
return container;
}
  /**
   * Adds the three per-container metadata keys (bytes used, block count,
   * pending delete block count) for {@code containerData} to the schema V3
   * metadata table and asserts that they were written.
   *
   * @return the estimated key count of the metadata table BEFORE the three
   *     entries were added, so callers can later assert the entries were
   *     cleaned up again.
   */
  private long addDbEntry(KeyValueContainerData containerData)
      throws Exception {
    try (DBHandle dbHandle = BlockUtils.getDB(containerData, conf)) {
      DatanodeStoreSchemaThreeImpl store = (DatanodeStoreSchemaThreeImpl)
          dbHandle.getStore();
      Table<String, Long> metadataTable = store.getMetadataTable();
      long baseSize = metadataTable.getEstimatedKeyCount();
      metadataTable.put(containerData.getBytesUsedKey(), 0L);
      metadataTable.put(containerData.getBlockCountKey(), 0L);
      metadataTable.put(containerData.getPendingDeleteBlockCountKey(), 0L);
      // The new keys should have been added in the MetadataTable
      assertEquals(baseSize + 3,
          metadataTable.getEstimatedKeyCount());
      return baseSize;
    }
  }
private void setLayoutAndSchemaVersion(
ContainerTestVersionInfo versionInfo) {
layout = versionInfo.getLayout();
String schemaVersion = versionInfo.getSchemaVersion();
conf = new OzoneConfiguration();
ContainerTestVersionInfo.setTestSchemaVersion(schemaVersion, conf);
}
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/ads/admanager/v1/targeting.proto
// Protobuf Java Version: 3.25.8
package com.google.ads.admanager.v1;
/**
*
*
* <pre>
* Bandwidth Targeting.
*
* Reach users accessing the internet via various means of connection, such as
* cable, DSL, or dial-up. Can be useful to target campaigns using
* low-resolution creatives or text ads for users with low bandwidth.
* </pre>
*
* Protobuf type {@code google.ads.admanager.v1.BandwidthTargeting}
*/
public final class BandwidthTargeting extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.ads.admanager.v1.BandwidthTargeting)
BandwidthTargetingOrBuilder {
private static final long serialVersionUID = 0L;
  // Use BandwidthTargeting.newBuilder() to construct.
  private BandwidthTargeting(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }

  // Default-instance constructor: both repeated string fields start as the
  // shared empty LazyStringArrayList.
  private BandwidthTargeting() {
    targetedBandwidthGroups_ = com.google.protobuf.LazyStringArrayList.emptyList();
    excludedBandwidthGroups_ = com.google.protobuf.LazyStringArrayList.emptyList();
  }

  // Invoked by the protobuf runtime to create fresh instances.
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new BandwidthTargeting();
  }
  // Message descriptor generated from google/ads/admanager/v1/targeting.proto.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.ads.admanager.v1.TargetingProto
        .internal_static_google_ads_admanager_v1_BandwidthTargeting_descriptor;
  }

  // Binds the generated field accessors to the descriptor for reflection.
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.ads.admanager.v1.TargetingProto
        .internal_static_google_ads_admanager_v1_BandwidthTargeting_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.ads.admanager.v1.BandwidthTargeting.class,
            com.google.ads.admanager.v1.BandwidthTargeting.Builder.class);
  }
  public static final int TARGETED_BANDWIDTH_GROUPS_FIELD_NUMBER = 3;

  // Backing storage for `repeated string targeted_bandwidth_groups = 3`.
  @SuppressWarnings("serial")
  private com.google.protobuf.LazyStringArrayList targetedBandwidthGroups_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return A list containing the targetedBandwidthGroups.
*/
  public com.google.protobuf.ProtocolStringList getTargetedBandwidthGroupsList() {
    // Exposes the backing LazyStringArrayList directly.
    return targetedBandwidthGroups_;
  }
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The count of targetedBandwidthGroups.
*/
  public int getTargetedBandwidthGroupsCount() {
    // Number of elements currently stored for field 3.
    return targetedBandwidthGroups_.size();
  }
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the element to return.
* @return The targetedBandwidthGroups at the given index.
*/
  public java.lang.String getTargetedBandwidthGroups(int index) {
    // Delegates bounds checking to the backing list.
    return targetedBandwidthGroups_.get(index);
  }
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the value to return.
* @return The bytes of the targetedBandwidthGroups at the given index.
*/
  public com.google.protobuf.ByteString getTargetedBandwidthGroupsBytes(int index) {
    // ByteString view of the element at the given index.
    return targetedBandwidthGroups_.getByteString(index);
  }
  public static final int EXCLUDED_BANDWIDTH_GROUPS_FIELD_NUMBER = 4;

  // Backing storage for `repeated string excluded_bandwidth_groups = 4`.
  @SuppressWarnings("serial")
  private com.google.protobuf.LazyStringArrayList excludedBandwidthGroups_ =
      com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return A list containing the excludedBandwidthGroups.
*/
  public com.google.protobuf.ProtocolStringList getExcludedBandwidthGroupsList() {
    // Exposes the backing LazyStringArrayList directly.
    return excludedBandwidthGroups_;
  }
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The count of excludedBandwidthGroups.
*/
  public int getExcludedBandwidthGroupsCount() {
    // Number of elements currently stored for field 4.
    return excludedBandwidthGroups_.size();
  }
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the element to return.
* @return The excludedBandwidthGroups at the given index.
*/
  public java.lang.String getExcludedBandwidthGroups(int index) {
    // Delegates bounds checking to the backing list.
    return excludedBandwidthGroups_.get(index);
  }
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the value to return.
* @return The bytes of the excludedBandwidthGroups at the given index.
*/
  public com.google.protobuf.ByteString getExcludedBandwidthGroupsBytes(int index) {
    // ByteString view of the element at the given index.
    return excludedBandwidthGroups_.getByteString(index);
  }
  // Memoized isInitialized result: -1 = not computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;

  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;

    // Nothing to validate for this message; success is recorded
    // unconditionally.
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    // Serialize each repeated string under its field number (3 then 4),
    // followed by any unknown fields preserved from parsing.
    for (int i = 0; i < targetedBandwidthGroups_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(
          output, 3, targetedBandwidthGroups_.getRaw(i));
    }
    for (int i = 0; i < excludedBandwidthGroups_.size(); i++) {
      com.google.protobuf.GeneratedMessageV3.writeString(
          output, 4, excludedBandwidthGroups_.getRaw(i));
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    // Result is memoized in memoizedSize; -1 means "not yet computed".
    int size = memoizedSize;
    if (size != -1) return size;

    size = 0;
    {
      int dataSize = 0;
      for (int i = 0; i < targetedBandwidthGroups_.size(); i++) {
        dataSize += computeStringSizeNoTag(targetedBandwidthGroups_.getRaw(i));
      }
      size += dataSize;
      // One tag byte per element for field number 3.
      size += 1 * getTargetedBandwidthGroupsList().size();
    }
    {
      int dataSize = 0;
      for (int i = 0; i < excludedBandwidthGroups_.size(); i++) {
        dataSize += computeStringSizeNoTag(excludedBandwidthGroups_.getRaw(i));
      }
      size += dataSize;
      // One tag byte per element for field number 4.
      size += 1 * getExcludedBandwidthGroupsList().size();
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-by-field equality over both repeated fields plus unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.ads.admanager.v1.BandwidthTargeting)) {
      return super.equals(obj);
    }
    com.google.ads.admanager.v1.BandwidthTargeting other =
        (com.google.ads.admanager.v1.BandwidthTargeting) obj;

    if (!getTargetedBandwidthGroupsList().equals(other.getTargetedBandwidthGroupsList()))
      return false;
    if (!getExcludedBandwidthGroupsList().equals(other.getExcludedBandwidthGroupsList()))
      return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Hash combines the descriptor, each non-empty field (mixed with its
  // field number), and the unknown fields; memoized in memoizedHashCode.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getTargetedBandwidthGroupsCount() > 0) {
      hash = (37 * hash) + TARGETED_BANDWIDTH_GROUPS_FIELD_NUMBER;
      hash = (53 * hash) + getTargetedBandwidthGroupsList().hashCode();
    }
    if (getExcludedBandwidthGroupsCount() > 0) {
      hash = (37 * hash) + EXCLUDED_BANDWIDTH_GROUPS_FIELD_NUMBER;
      hash = (53 * hash) + getExcludedBandwidthGroupsList().hashCode();
    }
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // --- Standard generated parse entry points. All overloads delegate to
  // PARSER (optionally via GeneratedMessageV3 helpers that translate
  // stream errors into IOException). ---
  public static com.google.ads.admanager.v1.BandwidthTargeting parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.ads.admanager.v1.BandwidthTargeting parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.ads.admanager.v1.BandwidthTargeting parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.ads.admanager.v1.BandwidthTargeting parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.ads.admanager.v1.BandwidthTargeting parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }

  public static com.google.ads.admanager.v1.BandwidthTargeting parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }

  public static com.google.ads.admanager.v1.BandwidthTargeting parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.ads.admanager.v1.BandwidthTargeting parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }

  // Delimited variants read a varint length prefix before the message.
  public static com.google.ads.admanager.v1.BandwidthTargeting parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }

  public static com.google.ads.admanager.v1.BandwidthTargeting parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }

  public static com.google.ads.admanager.v1.BandwidthTargeting parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }

  public static com.google.ads.admanager.v1.BandwidthTargeting parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }

  // Builders are derived from the default instance.
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }

  // Builder pre-populated with the given prototype's field values.
  public static Builder newBuilder(com.google.ads.admanager.v1.BandwidthTargeting prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }

  @java.lang.Override
  public Builder toBuilder() {
    // The default instance yields a fresh empty builder; any other
    // instance is merged into the new builder.
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }

  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* Bandwidth Targeting.
*
* Reach users accessing the internet via various means of connection, such as
* cable, DSL, or dial-up. Can be useful to target campaigns using
* low-resolution creatives or text ads for users with low bandwidth.
* </pre>
*
* Protobuf type {@code google.ads.admanager.v1.BandwidthTargeting}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.ads.admanager.v1.BandwidthTargeting)
com.google.ads.admanager.v1.BandwidthTargetingOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.ads.admanager.v1.TargetingProto
.internal_static_google_ads_admanager_v1_BandwidthTargeting_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.ads.admanager.v1.TargetingProto
.internal_static_google_ads_admanager_v1_BandwidthTargeting_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.ads.admanager.v1.BandwidthTargeting.class,
com.google.ads.admanager.v1.BandwidthTargeting.Builder.class);
}
// Construct using com.google.ads.admanager.v1.BandwidthTargeting.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
targetedBandwidthGroups_ = com.google.protobuf.LazyStringArrayList.emptyList();
excludedBandwidthGroups_ = com.google.protobuf.LazyStringArrayList.emptyList();
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.ads.admanager.v1.TargetingProto
.internal_static_google_ads_admanager_v1_BandwidthTargeting_descriptor;
}
@java.lang.Override
public com.google.ads.admanager.v1.BandwidthTargeting getDefaultInstanceForType() {
return com.google.ads.admanager.v1.BandwidthTargeting.getDefaultInstance();
}
@java.lang.Override
public com.google.ads.admanager.v1.BandwidthTargeting build() {
com.google.ads.admanager.v1.BandwidthTargeting result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.ads.admanager.v1.BandwidthTargeting buildPartial() {
com.google.ads.admanager.v1.BandwidthTargeting result =
new com.google.ads.admanager.v1.BandwidthTargeting(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(com.google.ads.admanager.v1.BandwidthTargeting result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
targetedBandwidthGroups_.makeImmutable();
result.targetedBandwidthGroups_ = targetedBandwidthGroups_;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
excludedBandwidthGroups_.makeImmutable();
result.excludedBandwidthGroups_ = excludedBandwidthGroups_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.ads.admanager.v1.BandwidthTargeting) {
return mergeFrom((com.google.ads.admanager.v1.BandwidthTargeting) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.ads.admanager.v1.BandwidthTargeting other) {
if (other == com.google.ads.admanager.v1.BandwidthTargeting.getDefaultInstance()) return this;
if (!other.targetedBandwidthGroups_.isEmpty()) {
if (targetedBandwidthGroups_.isEmpty()) {
targetedBandwidthGroups_ = other.targetedBandwidthGroups_;
bitField0_ |= 0x00000001;
} else {
ensureTargetedBandwidthGroupsIsMutable();
targetedBandwidthGroups_.addAll(other.targetedBandwidthGroups_);
}
onChanged();
}
if (!other.excludedBandwidthGroups_.isEmpty()) {
if (excludedBandwidthGroups_.isEmpty()) {
excludedBandwidthGroups_ = other.excludedBandwidthGroups_;
bitField0_ |= 0x00000002;
} else {
ensureExcludedBandwidthGroupsIsMutable();
excludedBandwidthGroups_.addAll(other.excludedBandwidthGroups_);
}
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 26:
{
java.lang.String s = input.readStringRequireUtf8();
ensureTargetedBandwidthGroupsIsMutable();
targetedBandwidthGroups_.add(s);
break;
} // case 26
case 34:
{
java.lang.String s = input.readStringRequireUtf8();
ensureExcludedBandwidthGroupsIsMutable();
excludedBandwidthGroups_.add(s);
break;
} // case 34
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.LazyStringArrayList targetedBandwidthGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureTargetedBandwidthGroupsIsMutable() {
if (!targetedBandwidthGroups_.isModifiable()) {
targetedBandwidthGroups_ =
new com.google.protobuf.LazyStringArrayList(targetedBandwidthGroups_);
}
bitField0_ |= 0x00000001;
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return A list containing the targetedBandwidthGroups.
*/
public com.google.protobuf.ProtocolStringList getTargetedBandwidthGroupsList() {
targetedBandwidthGroups_.makeImmutable();
return targetedBandwidthGroups_;
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The count of targetedBandwidthGroups.
*/
public int getTargetedBandwidthGroupsCount() {
return targetedBandwidthGroups_.size();
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the element to return.
* @return The targetedBandwidthGroups at the given index.
*/
public java.lang.String getTargetedBandwidthGroups(int index) {
return targetedBandwidthGroups_.get(index);
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the value to return.
* @return The bytes of the targetedBandwidthGroups at the given index.
*/
public com.google.protobuf.ByteString getTargetedBandwidthGroupsBytes(int index) {
return targetedBandwidthGroups_.getByteString(index);
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index to set the value at.
* @param value The targetedBandwidthGroups to set.
* @return This builder for chaining.
*/
public Builder setTargetedBandwidthGroups(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureTargetedBandwidthGroupsIsMutable();
targetedBandwidthGroups_.set(index, value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The targetedBandwidthGroups to add.
* @return This builder for chaining.
*/
public Builder addTargetedBandwidthGroups(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureTargetedBandwidthGroupsIsMutable();
targetedBandwidthGroups_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param values The targetedBandwidthGroups to add.
* @return This builder for chaining.
*/
public Builder addAllTargetedBandwidthGroups(java.lang.Iterable<java.lang.String> values) {
ensureTargetedBandwidthGroupsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, targetedBandwidthGroups_);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearTargetedBandwidthGroups() {
targetedBandwidthGroups_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* targeted/included.
* </pre>
*
* <code>
* repeated string targeted_bandwidth_groups = 3 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes of the targetedBandwidthGroups to add.
* @return This builder for chaining.
*/
public Builder addTargetedBandwidthGroupsBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureTargetedBandwidthGroupsIsMutable();
targetedBandwidthGroups_.add(value);
bitField0_ |= 0x00000001;
onChanged();
return this;
}
private com.google.protobuf.LazyStringArrayList excludedBandwidthGroups_ =
com.google.protobuf.LazyStringArrayList.emptyList();
private void ensureExcludedBandwidthGroupsIsMutable() {
if (!excludedBandwidthGroups_.isModifiable()) {
excludedBandwidthGroups_ =
new com.google.protobuf.LazyStringArrayList(excludedBandwidthGroups_);
}
bitField0_ |= 0x00000002;
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return A list containing the excludedBandwidthGroups.
*/
public com.google.protobuf.ProtocolStringList getExcludedBandwidthGroupsList() {
excludedBandwidthGroups_.makeImmutable();
return excludedBandwidthGroups_;
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The count of excludedBandwidthGroups.
*/
public int getExcludedBandwidthGroupsCount() {
return excludedBandwidthGroups_.size();
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the element to return.
* @return The excludedBandwidthGroups at the given index.
*/
public java.lang.String getExcludedBandwidthGroups(int index) {
return excludedBandwidthGroups_.get(index);
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index of the value to return.
* @return The bytes of the excludedBandwidthGroups at the given index.
*/
public com.google.protobuf.ByteString getExcludedBandwidthGroupsBytes(int index) {
return excludedBandwidthGroups_.getByteString(index);
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param index The index to set the value at.
* @param value The excludedBandwidthGroups to set.
* @return This builder for chaining.
*/
public Builder setExcludedBandwidthGroups(int index, java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureExcludedBandwidthGroupsIsMutable();
excludedBandwidthGroups_.set(index, value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The excludedBandwidthGroups to add.
* @return This builder for chaining.
*/
public Builder addExcludedBandwidthGroups(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
ensureExcludedBandwidthGroupsIsMutable();
excludedBandwidthGroups_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param values The excludedBandwidthGroups to add.
* @return This builder for chaining.
*/
public Builder addAllExcludedBandwidthGroups(java.lang.Iterable<java.lang.String> values) {
ensureExcludedBandwidthGroupsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, excludedBandwidthGroups_);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @return This builder for chaining.
*/
public Builder clearExcludedBandwidthGroups() {
excludedBandwidthGroups_ = com.google.protobuf.LazyStringArrayList.emptyList();
bitField0_ = (bitField0_ & ~0x00000002);
;
onChanged();
return this;
}
/**
*
*
* <pre>
* Optional. A list of resource names of the bandwidth groups that should be
* excluded.
* </pre>
*
* <code>
* repeated string excluded_bandwidth_groups = 4 [(.google.api.field_behavior) = OPTIONAL, (.google.api.resource_reference) = { ... }
* </code>
*
* @param value The bytes of the excludedBandwidthGroups to add.
* @return This builder for chaining.
*/
public Builder addExcludedBandwidthGroupsBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
ensureExcludedBandwidthGroupsIsMutable();
excludedBandwidthGroups_.add(value);
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.ads.admanager.v1.BandwidthTargeting)
}
// @@protoc_insertion_point(class_scope:google.ads.admanager.v1.BandwidthTargeting)
private static final com.google.ads.admanager.v1.BandwidthTargeting DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.ads.admanager.v1.BandwidthTargeting();
}
public static com.google.ads.admanager.v1.BandwidthTargeting getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<BandwidthTargeting> PARSER =
new com.google.protobuf.AbstractParser<BandwidthTargeting>() {
@java.lang.Override
public BandwidthTargeting parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<BandwidthTargeting> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<BandwidthTargeting> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.ads.admanager.v1.BandwidthTargeting getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,966 | java-apihub/proto-google-cloud-apihub-v1/src/main/java/com/google/cloud/apihub/v1/ListExternalApisResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/apihub/v1/apihub_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.apihub.v1;
/**
*
*
* <pre>
* The [ListExternalApis][google.cloud.apihub.v1.ApiHub.ListExternalApis]
* method's response.
* </pre>
*
* Protobuf type {@code google.cloud.apihub.v1.ListExternalApisResponse}
*/
public final class ListExternalApisResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.apihub.v1.ListExternalApisResponse)
ListExternalApisResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListExternalApisResponse.newBuilder() to construct.
private ListExternalApisResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private ListExternalApisResponse() {
externalApis_ = java.util.Collections.emptyList();
nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new ListExternalApisResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apihub.v1.ApiHubServiceProto
.internal_static_google_cloud_apihub_v1_ListExternalApisResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apihub.v1.ApiHubServiceProto
.internal_static_google_cloud_apihub_v1_ListExternalApisResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apihub.v1.ListExternalApisResponse.class,
com.google.cloud.apihub.v1.ListExternalApisResponse.Builder.class);
}
public static final int EXTERNAL_APIS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.apihub.v1.ExternalApi> externalApis_;
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.apihub.v1.ExternalApi> getExternalApisList() {
return externalApis_;
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.apihub.v1.ExternalApiOrBuilder>
getExternalApisOrBuilderList() {
return externalApis_;
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
@java.lang.Override
public int getExternalApisCount() {
return externalApis_.size();
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
@java.lang.Override
public com.google.cloud.apihub.v1.ExternalApi getExternalApis(int index) {
return externalApis_.get(index);
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
@java.lang.Override
public com.google.cloud.apihub.v1.ExternalApiOrBuilder getExternalApisOrBuilder(int index) {
return externalApis_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < externalApis_.size(); i++) {
output.writeMessage(1, externalApis_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < externalApis_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, externalApis_.get(i));
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.apihub.v1.ListExternalApisResponse)) {
return super.equals(obj);
}
com.google.cloud.apihub.v1.ListExternalApisResponse other =
(com.google.cloud.apihub.v1.ListExternalApisResponse) obj;
if (!getExternalApisList().equals(other.getExternalApisList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getExternalApisCount() > 0) {
hash = (37 * hash) + EXTERNAL_APIS_FIELD_NUMBER;
hash = (53 * hash) + getExternalApisList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.apihub.v1.ListExternalApisResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apihub.v1.ListExternalApisResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apihub.v1.ListExternalApisResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apihub.v1.ListExternalApisResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apihub.v1.ListExternalApisResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.apihub.v1.ListExternalApisResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.apihub.v1.ListExternalApisResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apihub.v1.ListExternalApisResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apihub.v1.ListExternalApisResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.apihub.v1.ListExternalApisResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.apihub.v1.ListExternalApisResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.apihub.v1.ListExternalApisResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.apihub.v1.ListExternalApisResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The [ListExternalApis][google.cloud.apihub.v1.ApiHub.ListExternalApis]
* method's response.
* </pre>
*
* Protobuf type {@code google.cloud.apihub.v1.ListExternalApisResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.apihub.v1.ListExternalApisResponse)
com.google.cloud.apihub.v1.ListExternalApisResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.apihub.v1.ApiHubServiceProto
.internal_static_google_cloud_apihub_v1_ListExternalApisResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.apihub.v1.ApiHubServiceProto
.internal_static_google_cloud_apihub_v1_ListExternalApisResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.apihub.v1.ListExternalApisResponse.class,
com.google.cloud.apihub.v1.ListExternalApisResponse.Builder.class);
}
// Construct using com.google.cloud.apihub.v1.ListExternalApisResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (externalApisBuilder_ == null) {
externalApis_ = java.util.Collections.emptyList();
} else {
externalApis_ = null;
externalApisBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.apihub.v1.ApiHubServiceProto
.internal_static_google_cloud_apihub_v1_ListExternalApisResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.apihub.v1.ListExternalApisResponse getDefaultInstanceForType() {
return com.google.cloud.apihub.v1.ListExternalApisResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.apihub.v1.ListExternalApisResponse build() {
com.google.cloud.apihub.v1.ListExternalApisResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.apihub.v1.ListExternalApisResponse buildPartial() {
com.google.cloud.apihub.v1.ListExternalApisResponse result =
new com.google.cloud.apihub.v1.ListExternalApisResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartialRepeatedFields(
com.google.cloud.apihub.v1.ListExternalApisResponse result) {
if (externalApisBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
externalApis_ = java.util.Collections.unmodifiableList(externalApis_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.externalApis_ = externalApis_;
} else {
result.externalApis_ = externalApisBuilder_.build();
}
}
private void buildPartial0(com.google.cloud.apihub.v1.ListExternalApisResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.apihub.v1.ListExternalApisResponse) {
return mergeFrom((com.google.cloud.apihub.v1.ListExternalApisResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.apihub.v1.ListExternalApisResponse other) {
if (other == com.google.cloud.apihub.v1.ListExternalApisResponse.getDefaultInstance())
return this;
if (externalApisBuilder_ == null) {
if (!other.externalApis_.isEmpty()) {
if (externalApis_.isEmpty()) {
externalApis_ = other.externalApis_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureExternalApisIsMutable();
externalApis_.addAll(other.externalApis_);
}
onChanged();
}
} else {
if (!other.externalApis_.isEmpty()) {
if (externalApisBuilder_.isEmpty()) {
externalApisBuilder_.dispose();
externalApisBuilder_ = null;
externalApis_ = other.externalApis_;
bitField0_ = (bitField0_ & ~0x00000001);
externalApisBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getExternalApisFieldBuilder()
: null;
} else {
externalApisBuilder_.addAllMessages(other.externalApis_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.apihub.v1.ExternalApi m =
input.readMessage(
com.google.cloud.apihub.v1.ExternalApi.parser(), extensionRegistry);
if (externalApisBuilder_ == null) {
ensureExternalApisIsMutable();
externalApis_.add(m);
} else {
externalApisBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.cloud.apihub.v1.ExternalApi> externalApis_ =
java.util.Collections.emptyList();
private void ensureExternalApisIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
externalApis_ =
new java.util.ArrayList<com.google.cloud.apihub.v1.ExternalApi>(externalApis_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.apihub.v1.ExternalApi,
com.google.cloud.apihub.v1.ExternalApi.Builder,
com.google.cloud.apihub.v1.ExternalApiOrBuilder>
externalApisBuilder_;
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public java.util.List<com.google.cloud.apihub.v1.ExternalApi> getExternalApisList() {
if (externalApisBuilder_ == null) {
return java.util.Collections.unmodifiableList(externalApis_);
} else {
return externalApisBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public int getExternalApisCount() {
if (externalApisBuilder_ == null) {
return externalApis_.size();
} else {
return externalApisBuilder_.getCount();
}
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public com.google.cloud.apihub.v1.ExternalApi getExternalApis(int index) {
if (externalApisBuilder_ == null) {
return externalApis_.get(index);
} else {
return externalApisBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public Builder setExternalApis(int index, com.google.cloud.apihub.v1.ExternalApi value) {
if (externalApisBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureExternalApisIsMutable();
externalApis_.set(index, value);
onChanged();
} else {
externalApisBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public Builder setExternalApis(
int index, com.google.cloud.apihub.v1.ExternalApi.Builder builderForValue) {
if (externalApisBuilder_ == null) {
ensureExternalApisIsMutable();
externalApis_.set(index, builderForValue.build());
onChanged();
} else {
externalApisBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public Builder addExternalApis(com.google.cloud.apihub.v1.ExternalApi value) {
if (externalApisBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureExternalApisIsMutable();
externalApis_.add(value);
onChanged();
} else {
externalApisBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public Builder addExternalApis(int index, com.google.cloud.apihub.v1.ExternalApi value) {
if (externalApisBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureExternalApisIsMutable();
externalApis_.add(index, value);
onChanged();
} else {
externalApisBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public Builder addExternalApis(com.google.cloud.apihub.v1.ExternalApi.Builder builderForValue) {
if (externalApisBuilder_ == null) {
ensureExternalApisIsMutable();
externalApis_.add(builderForValue.build());
onChanged();
} else {
externalApisBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public Builder addExternalApis(
int index, com.google.cloud.apihub.v1.ExternalApi.Builder builderForValue) {
if (externalApisBuilder_ == null) {
ensureExternalApisIsMutable();
externalApis_.add(index, builderForValue.build());
onChanged();
} else {
externalApisBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public Builder addAllExternalApis(
java.lang.Iterable<? extends com.google.cloud.apihub.v1.ExternalApi> values) {
if (externalApisBuilder_ == null) {
ensureExternalApisIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, externalApis_);
onChanged();
} else {
externalApisBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public Builder clearExternalApis() {
if (externalApisBuilder_ == null) {
externalApis_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
externalApisBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public Builder removeExternalApis(int index) {
if (externalApisBuilder_ == null) {
ensureExternalApisIsMutable();
externalApis_.remove(index);
onChanged();
} else {
externalApisBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public com.google.cloud.apihub.v1.ExternalApi.Builder getExternalApisBuilder(int index) {
return getExternalApisFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public com.google.cloud.apihub.v1.ExternalApiOrBuilder getExternalApisOrBuilder(int index) {
if (externalApisBuilder_ == null) {
return externalApis_.get(index);
} else {
return externalApisBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public java.util.List<? extends com.google.cloud.apihub.v1.ExternalApiOrBuilder>
getExternalApisOrBuilderList() {
if (externalApisBuilder_ != null) {
return externalApisBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(externalApis_);
}
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public com.google.cloud.apihub.v1.ExternalApi.Builder addExternalApisBuilder() {
return getExternalApisFieldBuilder()
.addBuilder(com.google.cloud.apihub.v1.ExternalApi.getDefaultInstance());
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public com.google.cloud.apihub.v1.ExternalApi.Builder addExternalApisBuilder(int index) {
return getExternalApisFieldBuilder()
.addBuilder(index, com.google.cloud.apihub.v1.ExternalApi.getDefaultInstance());
}
/**
*
*
* <pre>
* The External API resources present in the API hub.
* </pre>
*
* <code>repeated .google.cloud.apihub.v1.ExternalApi external_apis = 1;</code>
*/
public java.util.List<com.google.cloud.apihub.v1.ExternalApi.Builder>
getExternalApisBuilderList() {
return getExternalApisFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.apihub.v1.ExternalApi,
com.google.cloud.apihub.v1.ExternalApi.Builder,
com.google.cloud.apihub.v1.ExternalApiOrBuilder>
getExternalApisFieldBuilder() {
if (externalApisBuilder_ == null) {
externalApisBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.apihub.v1.ExternalApi,
com.google.cloud.apihub.v1.ExternalApi.Builder,
com.google.cloud.apihub.v1.ExternalApiOrBuilder>(
externalApis_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
externalApis_ = null;
}
return externalApisBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.apihub.v1.ListExternalApisResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.apihub.v1.ListExternalApisResponse)
// Singleton default (all-fields-empty) instance, shared by all callers.
private static final com.google.cloud.apihub.v1.ListExternalApisResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.apihub.v1.ListExternalApisResponse();
}
/** Returns the shared immutable default instance. */
public static com.google.cloud.apihub.v1.ListExternalApisResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Stateless, thread-safe parser shared by all instances; on parse failure the partially
// built message is attached to the exception so callers can inspect what was read.
private static final com.google.protobuf.Parser<ListExternalApisResponse> PARSER =
    new com.google.protobuf.AbstractParser<ListExternalApisResponse>() {
      @java.lang.Override
      public ListExternalApisResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the shared parser for this message type. */
public static com.google.protobuf.Parser<ListExternalApisResponse> parser() {
  return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListExternalApisResponse> getParserForType() {
  // Instance-level accessor required by the Message interface; same shared PARSER.
  return PARSER;
}
@java.lang.Override
public com.google.cloud.apihub.v1.ListExternalApisResponse getDefaultInstanceForType() {
  // Instance-level accessor required by the Message interface; same shared default.
  return DEFAULT_INSTANCE;
}
}
// ---- extraction artifact: a new source file begins here ----
// repo: apache/rocketmq-clients
// path: java/client/src/main/java/org/apache/rocketmq/client/java/impl/consumer/ProcessQueueImpl.java
/*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.rocketmq.client.java.impl.consumer;
import apache.rocketmq.v2.AckMessageRequest;
import apache.rocketmq.v2.AckMessageResponse;
import apache.rocketmq.v2.ChangeInvisibleDurationRequest;
import apache.rocketmq.v2.ChangeInvisibleDurationResponse;
import apache.rocketmq.v2.Code;
import apache.rocketmq.v2.ForwardMessageToDeadLetterQueueRequest;
import apache.rocketmq.v2.ForwardMessageToDeadLetterQueueResponse;
import apache.rocketmq.v2.ReceiveMessageRequest;
import apache.rocketmq.v2.Status;
import com.google.common.util.concurrent.FutureCallback;
import com.google.common.util.concurrent.Futures;
import com.google.common.util.concurrent.ListenableFuture;
import com.google.common.util.concurrent.MoreExecutors;
import com.google.common.util.concurrent.SettableFuture;
import com.google.errorprone.annotations.concurrent.GuardedBy;
import io.grpc.StatusRuntimeException;
import java.time.Duration;
import java.util.ArrayList;
import java.util.Collections;
import java.util.List;
import java.util.Set;
import java.util.UUID;
import java.util.concurrent.ScheduledExecutorService;
import java.util.concurrent.TimeUnit;
import java.util.concurrent.atomic.AtomicLong;
import java.util.concurrent.locks.ReadWriteLock;
import java.util.concurrent.locks.ReentrantReadWriteLock;
import java.util.function.Function;
import java.util.stream.Collectors;
import org.apache.rocketmq.client.apis.consumer.ConsumeResult;
import org.apache.rocketmq.client.apis.consumer.FilterExpression;
import org.apache.rocketmq.client.apis.message.MessageId;
import org.apache.rocketmq.client.apis.message.MessageView;
import org.apache.rocketmq.client.java.exception.BadRequestException;
import org.apache.rocketmq.client.java.exception.TooManyRequestsException;
import org.apache.rocketmq.client.java.hook.MessageHookPoints;
import org.apache.rocketmq.client.java.hook.MessageHookPointsStatus;
import org.apache.rocketmq.client.java.hook.MessageInterceptorContextImpl;
import org.apache.rocketmq.client.java.message.GeneralMessage;
import org.apache.rocketmq.client.java.message.GeneralMessageImpl;
import org.apache.rocketmq.client.java.message.MessageViewImpl;
import org.apache.rocketmq.client.java.misc.ClientId;
import org.apache.rocketmq.client.java.misc.ExcludeFromJacocoGeneratedReport;
import org.apache.rocketmq.client.java.retry.RetryPolicy;
import org.apache.rocketmq.client.java.route.Endpoints;
import org.apache.rocketmq.client.java.route.MessageQueueImpl;
import org.apache.rocketmq.client.java.rpc.RpcFuture;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* Default implementation of {@link ProcessQueue}.
*
 * <p>Apart from the basic part mentioned in {@link ProcessQueue}, this implementation also takes
 * care of message reception, local caching bounded by count/byte thresholds, and backoff-based
 * retry of failed receive attempts.
*
* @see ProcessQueue
*/
@SuppressWarnings({"NullableProblems", "UnstableApiUsage"})
class ProcessQueueImpl implements ProcessQueue {
// Backoff before retrying a failed forward-to-DLQ for a FIFO message.
static final Duration FORWARD_FIFO_MESSAGE_TO_DLQ_FAILURE_BACKOFF_DELAY = Duration.ofSeconds(1);
// Backoff before retrying a failed ACK request.
static final Duration ACK_MESSAGE_FAILURE_BACKOFF_DELAY = Duration.ofSeconds(1);
// Backoff before retrying a failed change-invisible-duration request.
static final Duration CHANGE_INVISIBLE_DURATION_FAILURE_BACKOFF_DELAY = Duration.ofSeconds(1);

private static final Logger log = LoggerFactory.getLogger(ProcessQueueImpl.class);

// Delay before the next receive attempt when the server applies flow control (TooManyRequests).
private static final Duration RECEIVING_FLOW_CONTROL_BACKOFF_DELAY = Duration.ofMillis(20);
// Delay before the next receive attempt after an unexpected reception failure.
private static final Duration RECEIVING_FAILURE_BACKOFF_DELAY = Duration.ofSeconds(1);
// Delay before the next receive attempt while the local cache is saturated.
private static final Duration RECEIVING_BACKOFF_DELAY_WHEN_CACHE_IS_FULL = Duration.ofSeconds(1);

private final PushConsumerImpl consumer;

/**
 * Dropped means {@link ProcessQueue} is deprecated: no message will be fetched from remote anymore.
 */
private volatile boolean dropped;
private final MessageQueueImpl mq;
private final FilterExpression filterExpression;

/**
 * Pending messages: already cached locally but not yet taken by the consumer dispatcher.
 */
@GuardedBy("cachedMessageLock")
private final List<MessageViewImpl> cachedMessages;
private final ReadWriteLock cachedMessageLock;
// Total body bytes currently cached; kept in sync with cachedMessages under the write lock.
private final AtomicLong cachedMessagesBytes;

// Per-queue statistics: receive attempts issued and messages received.
private final AtomicLong receptionTimes;
private final AtomicLong receivedMessagesQuantity;

// Nano timestamp of the latest reception activity; consulted by expired().
private volatile long activityNanoTime = System.nanoTime();
// Nano timestamp of the latest cache-full event; Long.MIN_VALUE means "never full so far".
private volatile long cacheFullNanoTime = Long.MIN_VALUE;
/**
 * Creates a process queue bound to one message queue of the given consumer.
 *
 * @param consumer         owning push consumer
 * @param mq               the message queue this process queue serves
 * @param filterExpression subscription filter applied when receiving
 */
public ProcessQueueImpl(PushConsumerImpl consumer, MessageQueueImpl mq, FilterExpression filterExpression) {
    this.consumer = consumer;
    this.mq = mq;
    this.filterExpression = filterExpression;
    // A freshly created queue is live until drop() is called.
    this.dropped = false;
    this.cachedMessages = new ArrayList<>();
    this.cachedMessageLock = new ReentrantReadWriteLock();
    this.cachedMessagesBytes = new AtomicLong();
    this.receptionTimes = new AtomicLong(0);
    this.receivedMessagesQuantity = new AtomicLong(0);
}
@Override
public MessageQueueImpl getMessageQueue() {
    // The message queue this process queue is bound to; immutable after construction.
    return mq;
}
@Override
public void drop() {
    // Marks the queue deprecated; in-flight work may finish, but no new messages are fetched.
    this.dropped = true;
}
/**
 * Whether this queue looks dead: no reception activity AND no cache-full event within
 * three long-polling round trips (long-polling timeout + request timeout, times 3).
 *
 * <p>A recent cache-full event is treated as activity, because reception is then
 * deliberately paused rather than stuck.
 */
@Override
public boolean expired() {
    final Duration polling = consumer.getPushConsumerSettings().getLongPollingTimeout();
    final Duration rpcTimeout = consumer.getClientConfiguration().getRequestTimeout();
    final Duration threshold = polling.plus(rpcTimeout).multipliedBy(3);
    final Duration sinceLastActivity = Duration.ofNanos(System.nanoTime() - activityNanoTime);
    if (sinceLastActivity.compareTo(threshold) < 0) {
        // Reception happened recently — still alive.
        return false;
    }
    final Duration sinceCacheFull = Duration.ofNanos(System.nanoTime() - cacheFullNanoTime);
    if (sinceCacheFull.compareTo(threshold) < 0) {
        // Cache was full recently; reception is intentionally backed off, not dead.
        return false;
    }
    log.warn("Process queue is idle, idleDuration={}, maxIdleDuration={}, afterCacheFullDuration={}, mq={}, "
        + "clientId={}", sinceLastActivity, threshold, sinceCacheFull, mq, consumer.getClientId());
    return true;
}
/**
 * Adds freshly received messages to the local cache.
 *
 * <p>The message list and the byte counter must move together, so both updates
 * happen under the write lock.
 */
void cacheMessages(List<MessageViewImpl> messageList) {
    cachedMessageLock.writeLock().lock();
    try {
        messageList.forEach(view -> {
            cachedMessages.add(view);
            cachedMessagesBytes.addAndGet(view.getBody().remaining());
        });
    } finally {
        cachedMessageLock.writeLock().unlock();
    }
}
/**
 * Size of the next receive batch: the remaining cache capacity, clamped to
 * [1, configured receive batch size] so we always ask for at least one message.
 */
private int getReceptionBatchSize() {
    final int remainingCapacity = consumer.cacheMessageCountThresholdPerQueue() - this.cachedMessagesCount();
    return Math.min(Math.max(remainingCapacity, 1), consumer.getPushConsumerSettings().getReceiveBatchSize());
}
@Override
public void fetchMessageImmediately() {
    // Entry point used when the queue is assigned: kick off reception at once with a fresh attempt id.
    receiveMessageImmediately();
}
/**
 * Schedules the next receive attempt after a reception failure.
 *
 * <p>Flow-control rejections get a short backoff; every other failure gets the
 * standard one-second backoff. Must never throw.
 *
 * @param t         the failure that aborted the reception
 * @param attemptId attempt id to carry into the retry (may be {@code null})
 */
public void onReceiveMessageException(Throwable t, String attemptId) {
    final Duration backoff;
    if (t instanceof TooManyRequestsException) {
        // Server-side throttling: retry quickly once the flow-control window relaxes.
        backoff = RECEIVING_FLOW_CONTROL_BACKOFF_DELAY;
    } else {
        backoff = RECEIVING_FAILURE_BACKOFF_DELAY;
    }
    receiveMessageLater(backoff, attemptId);
}
/**
 * Schedules a receive attempt after {@code delay} on the consumer's scheduler.
 *
 * <p>A scheduling failure after shutdown is swallowed silently; any other
 * scheduling failure is a bug and is routed back through the backoff path.
 */
private void receiveMessageLater(Duration delay, String attemptId) {
    final ClientId id = consumer.getClientId();
    final ScheduledExecutorService timer = consumer.getScheduler();
    try {
        log.info("Try to receive message later, mq={}, delay={}, clientId={}", mq, delay, id);
        timer.schedule(() -> receiveMessage(attemptId), delay.toNanos(), TimeUnit.NANOSECONDS);
    } catch (Throwable t) {
        if (timer.isShutdown()) {
            // Rejected because the consumer is shutting down — nothing to do.
            return;
        }
        // Should never reach here.
        log.error("[Bug] Failed to schedule message receiving request, mq={}, clientId={}", mq, id, t);
        onReceiveMessageException(t, attemptId);
    }
}
// Random id correlating the retries of one logical receive attempt (e.g. in server-side logs).
private String generateAttemptId() {
    return UUID.randomUUID().toString();
}
/** Starts a receive attempt with a freshly generated attempt id. */
public void receiveMessage() {
    receiveMessage(this.generateAttemptId());
}
/**
 * Receives messages for this queue unless the queue is dropped or the cache is full.
 *
 * @param attemptId attempt id carried through retries of the same logical attempt
 */
public void receiveMessage(String attemptId) {
    final ClientId id = consumer.getClientId();
    // Guard 1: a dropped queue must stop fetching entirely.
    if (dropped) {
        log.info("Process queue has been dropped, no longer receive message, mq={}, clientId={}", mq, id);
        return;
    }
    // Guard 2: back off while the cache is saturated (isCacheFull also stamps cacheFullNanoTime).
    if (this.isCacheFull()) {
        log.warn("Process queue cache is full, would receive message later, mq={}, clientId={}", mq, id);
        receiveMessageLater(RECEIVING_BACKOFF_DELAY_WHEN_CACHE_IS_FULL, attemptId);
        return;
    }
    receiveMessageImmediately(attemptId);
}
// Convenience overload: receive right away under a new attempt id.
private void receiveMessageImmediately() {
    receiveMessageImmediately(this.generateAttemptId());
}
/**
 * Issues a receive-message RPC to the broker right away and wires up the async callback chain.
 *
 * <p>On success, messages pass through the after-receive interceptors; when interceptor
 * filtering is enabled, messages dropped by interceptors are acked immediately and only the
 * remainder is handed to {@code onReceiveMessageResult}. On failure, a backoff retry is
 * scheduled via {@link #onReceiveMessageException}. This method must never throw.
 *
 * @param attemptId id correlating retries of the same logical receive attempt
 */
private void receiveMessageImmediately(String attemptId) {
    final ClientId clientId = consumer.getClientId();
    if (!consumer.isRunning()) {
        log.info("Stop to receive message because consumer is not running, mq={}, clientId={}", mq, clientId);
        return;
    }
    try {
        final Endpoints endpoints = mq.getBroker().getEndpoints();
        final int batchSize = this.getReceptionBatchSize();
        final Duration longPollingTimeout = consumer.getPushConsumerSettings().getLongPollingTimeout();
        final ReceiveMessageRequest request = consumer.wrapReceiveMessageRequest(batchSize, mq, filterExpression,
            longPollingTimeout, attemptId);
        // Stamp activity before the RPC so expired() sees this attempt even if it hangs.
        activityNanoTime = System.nanoTime();
        // Intercept before message reception.
        final MessageInterceptorContextImpl context = new MessageInterceptorContextImpl(MessageHookPoints.RECEIVE);
        consumer.doBefore(context, Collections.emptyList());
        final ListenableFuture<ReceiveMessageResult> future = consumer.receiveMessage(request, mq,
            longPollingTimeout);
        Futures.addCallback(future, new FutureCallback<ReceiveMessageResult>() {
            @Override
            public void onSuccess(ReceiveMessageResult result) {
                // Intercept after message reception.
                final List<GeneralMessage> generalMessages = result.getMessageViewImpls().stream()
                    .map((Function<MessageView, GeneralMessage>) GeneralMessageImpl::new)
                    .collect(Collectors.toList());
                final MessageInterceptorContextImpl context0 =
                    new MessageInterceptorContextImpl(context, MessageHookPointsStatus.OK);
                consumer.doAfter(context0, generalMessages);
                // Only perform message filtering when enableMessageInterceptorFiltering is enabled.
                if (consumer.isEnableMessageInterceptorFiltering()) {
                    // Interceptors may have removed entries from generalMessages; any original
                    // message whose id no longer appears is treated as filtered out.
                    final List<MessageViewImpl> originalMessages =
                        new ArrayList<>(result.getMessageViewImpls());
                    final Set<MessageId> filteredMessageIds = generalMessages.stream()
                        .filter(msg -> msg.getMessageId().isPresent())
                        .map(msg -> msg.getMessageId().get())
                        .collect(Collectors.toSet());
                    final List<MessageViewImpl> filteredOutMessages = new ArrayList<>();
                    final List<MessageViewImpl> remainingMessages = new ArrayList<>();
                    for (MessageViewImpl originalMsg : originalMessages) {
                        if (filteredMessageIds.contains(originalMsg.getMessageId())) {
                            remainingMessages.add(originalMsg);
                        } else {
                            filteredOutMessages.add(originalMsg);
                        }
                    }
                    // Ack filtered out messages.
                    // NOTE(review): acks are fire-and-forget here; failures rely on ackMessage's
                    // own retry machinery — confirm that is the intent.
                    if (!filteredOutMessages.isEmpty()) {
                        log.info("Acking {} filtered out messages by interceptor, mq={}, clientId={}",
                            filteredOutMessages.size(), mq, consumer.getClientId());
                        for (MessageViewImpl filteredOutMsg : filteredOutMessages) {
                            ListenableFuture<Void> ackFuture = ackMessage(filteredOutMsg);
                            ackFuture.addListener(() -> {
                                log.debug("Successfully acked filtered out message, messageId={}, topic={}",
                                    filteredOutMsg.getMessageId(), filteredOutMsg.getTopic());
                            }, MoreExecutors.directExecutor());
                        }
                    }
                    try {
                        // Create new ReceiveMessageResult with filtered messages.
                        ReceiveMessageResult filteredResult =
                            ReceiveMessageResult.createFilteredResult(result, remainingMessages);
                        onReceiveMessageResult(filteredResult);
                    } catch (Throwable t) {
                        // Should never reach here.
                        log.error("[Bug] Exception raised while handling receive result, mq={}, endpoints={}, "
                            + "clientId={}", mq, endpoints, clientId, t);
                        onReceiveMessageException(t, attemptId);
                    }
                } else {
                    // When filtering is disabled, use original result directly to avoid performance overhead.
                    try {
                        onReceiveMessageResult(result);
                    } catch (Throwable t) {
                        // Should never reach here.
                        log.error("[Bug] Exception raised while handling receive result, mq={}, endpoints={}, "
                            + "clientId={}", mq, endpoints, clientId, t);
                        onReceiveMessageException(t, attemptId);
                    }
                }
            }

            @Override
            public void onFailure(Throwable t) {
                // On a long-polling deadline we reuse the same attempt id for the retry;
                // otherwise nextAttemptId stays null (presumably regenerated downstream —
                // TODO confirm null handling in receiveMessage/wrapReceiveMessageRequest).
                String nextAttemptId = null;
                if (t instanceof StatusRuntimeException) {
                    StatusRuntimeException exception = (StatusRuntimeException) t;
                    if (io.grpc.Status.DEADLINE_EXCEEDED.getCode() == exception.getStatus().getCode()) {
                        nextAttemptId = request.getAttemptId();
                    }
                }
                // Intercept after message reception.
                final MessageInterceptorContextImpl context0 =
                    new MessageInterceptorContextImpl(context, MessageHookPointsStatus.ERROR);
                consumer.doAfter(context0, Collections.emptyList());

                log.error("Exception raised during message reception, mq={}, endpoints={}, attemptId={}, " +
                    "nextAttemptId={}, clientId={}", mq, endpoints, request.getAttemptId(), nextAttemptId,
                    clientId, t);
                onReceiveMessageException(t, nextAttemptId);
            }
        }, MoreExecutors.directExecutor());
        receptionTimes.getAndIncrement();
        consumer.getReceptionTimes().getAndIncrement();
    } catch (Throwable t) {
        log.error("Exception raised during message reception, mq={}, clientId={}", mq, clientId, t);
        onReceiveMessageException(t, attemptId);
    }
}
/**
 * Reports whether this process queue's local cache has hit either the
 * per-queue message-count threshold or the per-queue byte-size threshold.
 * When a threshold is exceeded, the moment is recorded in
 * {@code cacheFullNanoTime} and {@code true} is returned.
 */
public boolean isCacheFull() {
    final ClientId clientId = consumer.getClientId();
    final int countThreshold = consumer.cacheMessageCountThresholdPerQueue();
    final long cachedCount = this.cachedMessagesCount();
    if (countThreshold <= cachedCount) {
        log.warn("Process queue total cached messages quantity exceeds the threshold, threshold={}, actual={}," +
            " mq={}, clientId={}", countThreshold, cachedCount, mq, clientId);
        cacheFullNanoTime = System.nanoTime();
        return true;
    }
    final int bytesThreshold = consumer.cacheMessageBytesThresholdPerQueue();
    final long cachedBytes = this.cachedMessageBytes();
    if (bytesThreshold <= cachedBytes) {
        log.warn("Process queue total cached messages memory exceeds the threshold, threshold={} bytes," +
            " actual={} bytes, mq={}, clientId={}", bytesThreshold, cachedBytes, mq, clientId);
        cacheFullNanoTime = System.nanoTime();
        return true;
    }
    return false;
}
/**
 * Drops a (non-FIFO) message: negatively acknowledge it so the broker can
 * redeliver it later, then evict it from the local cache once the nack settles.
 */
@Override
public void discardMessage(MessageViewImpl messageView) {
    log.info("Discard message, mq={}, messageId={}, clientId={}", mq, messageView.getMessageId(),
        consumer.getClientId());
    nackMessage(messageView).addListener(() -> evictCache(messageView), MoreExecutors.directExecutor());
}
/**
 * Drops a FIFO message: forward it to the dead-letter queue (redelivery would
 * break ordering), then evict it from the local cache once forwarding settles.
 */
@Override
public void discardFifoMessage(MessageViewImpl messageView) {
    log.info("Discard fifo message, mq={}, messageId={}, clientId={}", mq, messageView.getMessageId(),
        consumer.getClientId());
    forwardToDeadLetterQueue(messageView)
        .addListener(() -> evictCache(messageView), MoreExecutors.directExecutor());
}
/**
 * Returns the number of messages currently cached in this process queue.
 * Guarded by the cache read lock so the size is consistent with concurrent
 * cache/evict operations.
 */
public int cachedMessagesCount() {
    cachedMessageLock.readLock().lock();
    try {
        return cachedMessages.size();
    } finally {
        cachedMessageLock.readLock().unlock();
    }
}
/**
 * Returns the total body size, in bytes, of all currently cached messages.
 * Backed by an atomic counter, so no lock is needed here.
 */
public long cachedMessageBytes() {
    return cachedMessagesBytes.get();
}
/**
 * Handles one successful receive round: caches any delivered messages, bumps
 * the per-queue and per-consumer reception counters, hands the batch to the
 * consume service, and finally schedules the next long-polling receive.
 */
private void onReceiveMessageResult(ReceiveMessageResult result) {
    final List<MessageViewImpl> messages = result.getMessageViewImpls();
    if (!messages.isEmpty()) {
        final int delivered = messages.size();
        cacheMessages(messages);
        receivedMessagesQuantity.getAndAdd(delivered);
        consumer.getReceivedMessagesQuantity().getAndAdd(delivered);
        consumer.getConsumeService().consume(this, messages);
    }
    // Keep the long-polling loop running regardless of whether anything arrived.
    receiveMessage();
}
/**
 * Removes the message from the local cache and deducts its body size from the
 * cached-bytes counter. No-op if the message is not (or no longer) cached.
 */
private void evictCache(MessageViewImpl messageView) {
    cachedMessageLock.writeLock().lock();
    try {
        if (cachedMessages.remove(messageView)) {
            // Only adjust the byte counter when the message was actually present,
            // so a duplicate eviction cannot drive the counter negative.
            cachedMessagesBytes.addAndGet(-messageView.getBody().remaining());
        }
    } finally {
        cachedMessageLock.writeLock().unlock();
    }
}
/**
 * Records one consumption outcome in the consumer-wide ok/error counters.
 */
private void statsConsumptionResult(ConsumeResult consumeResult) {
    if (ConsumeResult.SUCCESS.equals(consumeResult)) {
        consumer.consumptionOkQuantity.incrementAndGet();
    } else {
        consumer.consumptionErrorQuantity.incrementAndGet();
    }
}
/**
 * Settles a consumed (non-FIFO) message: ack on success, nack on failure, and
 * evict it from the local cache once the settlement request completes.
 */
@Override
public void eraseMessage(MessageViewImpl messageView, ConsumeResult consumeResult) {
    statsConsumptionResult(consumeResult);
    final ListenableFuture<Void> settlement;
    if (ConsumeResult.SUCCESS.equals(consumeResult)) {
        settlement = ackMessage(messageView);
    } else {
        settlement = nackMessage(messageView);
    }
    settlement.addListener(() -> evictCache(messageView), MoreExecutors.directExecutor());
}
/**
 * Negatively acknowledges a message by shortening/extending its invisible
 * duration according to the retry policy for its current delivery attempt.
 * The returned future completes when the change-invisible request settles.
 */
private ListenableFuture<Void> nackMessage(final MessageViewImpl messageView) {
    final Duration nextDelay =
        consumer.getRetryPolicy().getNextAttemptDelay(messageView.getDeliveryAttempt());
    final SettableFuture<Void> resultFuture = SettableFuture.create();
    changeInvisibleDuration(messageView, nextDelay, 1, resultFuture);
    return resultFuture;
}
/**
 * Sends one change-invisible-duration request and completes {@code future0}
 * with its terminal outcome.
 *
 * <p>Outcome handling:
 * <ul>
 *   <li>{@code INVALID_RECEIPT_HANDLE}: unrecoverable — the future is failed
 *       with a {@link BadRequestException} and no retry is attempted.</li>
 *   <li>any other non-OK code, or a transport failure: re-scheduled via
 *       {@link #changeInvisibleDurationLater} with the attempt count bumped.</li>
 *   <li>OK: the future completes successfully.</li>
 * </ul>
 *
 * @param messageView message whose invisible duration is being changed
 * @param duration    new invisible duration
 * @param attempt     1-based attempt counter (logging/backoff bookkeeping only)
 * @param future0     completed when the operation terminally succeeds or fails
 */
private void changeInvisibleDuration(final MessageViewImpl messageView, final Duration duration,
    final int attempt, final SettableFuture<Void> future0) {
    final ClientId clientId = consumer.getClientId();
    final String consumerGroup = consumer.getConsumerGroup();
    final MessageId messageId = messageView.getMessageId();
    final Endpoints endpoints = messageView.getEndpoints();
    final RpcFuture<ChangeInvisibleDurationRequest, ChangeInvisibleDurationResponse> future =
        consumer.changeInvisibleDuration(messageView, duration);
    Futures.addCallback(future, new FutureCallback<ChangeInvisibleDurationResponse>() {
        @Override
        public void onSuccess(ChangeInvisibleDurationResponse response) {
            final String requestId = future.getContext().getRequestId();
            final Status status = response.getStatus();
            final Code code = status.getCode();
            // Invalid receipt handle can never become valid again: fail fast, no retry.
            if (Code.INVALID_RECEIPT_HANDLE.equals(code)) {
                log.error("Failed to change invisible duration due to the invalid receipt handle, forgive to "
                    + "retry, clientId={}, consumerGroup={}, messageId={}, attempt={}, mq={}, endpoints={}, "
                    + "requestId={}, status message=[{}]", clientId, consumerGroup, messageId, attempt, mq,
                    endpoints, requestId, status.getMessage());
                future0.setException(new BadRequestException(code.getNumber(), requestId, status.getMessage()));
                return;
            }
            // Log failure and retry later.
            if (!Code.OK.equals(code)) {
                log.error("Failed to change invisible duration, would retry later, clientId={}, "
                    + "consumerGroup={}, messageId={}, attempt={}, mq={}, endpoints={}, requestId={}, "
                    + "status message=[{}]", clientId, consumerGroup, messageId, attempt, mq, endpoints,
                    requestId, status.getMessage());
                changeInvisibleDurationLater(messageView, duration, 1 + attempt, future0);
                return;
            }
            // Set result if succeed in changing invisible time.
            future0.setFuture(Futures.immediateVoidFuture());
            // Log retries.
            if (1 < attempt) {
                log.info("Finally, change invisible duration successfully, clientId={}, consumerGroup={} "
                    + "messageId={}, attempt={}, mq={}, endpoints={}, requestId={}", clientId, consumerGroup,
                    messageId, attempt, mq, endpoints, requestId);
                return;
            }
            log.debug("Change invisible duration successfully, clientId={}, consumerGroup={}, messageId={}, "
                + "mq={}, endpoints={}, requestId={}", clientId, consumerGroup, messageId, mq, endpoints,
                requestId);
        }
        @Override
        public void onFailure(Throwable t) {
            // Log failure and retry later.
            log.error("Exception raised while changing invisible duration, would retry later, clientId={}, "
                + "consumerGroup={}, messageId={}, mq={}, endpoints={}", clientId, consumerGroup,
                messageId, mq, endpoints, t);
            changeInvisibleDurationLater(messageView, duration, 1 + attempt, future0);
        }
    }, MoreExecutors.directExecutor());
}
/**
 * Schedules a retry of {@link #changeInvisibleDuration} after the fixed
 * backoff delay. If scheduling itself fails while the scheduler is still
 * running (which should never happen), logs the failure and retries
 * immediately with a bumped attempt counter; if the scheduler has been shut
 * down, the retry is silently abandoned.
 *
 * <p>Fix: the caught throwable is now passed to {@code log.error} so the
 * scheduling failure's stack trace is no longer swallowed.
 */
private void changeInvisibleDurationLater(final MessageViewImpl messageView, final Duration duration,
    final int attempt, SettableFuture<Void> future) {
    final MessageId messageId = messageView.getMessageId();
    final ScheduledExecutorService scheduler = consumer.getScheduler();
    try {
        scheduler.schedule(() -> changeInvisibleDuration(messageView, duration, attempt, future),
            CHANGE_INVISIBLE_DURATION_FAILURE_BACKOFF_DELAY.toNanos(), TimeUnit.NANOSECONDS);
    } catch (Throwable t) {
        if (scheduler.isShutdown()) {
            // Client is closing; dropping the retry is expected.
            return;
        }
        // Should never reach here; include the throwable so the cause is visible.
        log.error("[Bug] Failed to schedule message change invisible duration request, mq={}, messageId={}, "
            + "clientId={}", mq, messageId, consumer.getClientId(), t);
        changeInvisibleDurationLater(messageView, duration, 1 + attempt, future);
    }
}
/**
 * Settles a consumed FIFO message.
 *
 * <p>On failure with attempts remaining, the message is redelivered locally
 * through the consume service after the retry-policy delay; the chain recurses
 * (via the async transform) until the message succeeds or attempts run out.
 * Terminal outcomes: ack on success, forward to the dead-letter queue on final
 * failure. The cache entry is evicted once the terminal request completes.
 *
 * @return future completed when the terminal ack/DLQ request settles
 */
@Override
public ListenableFuture<Void> eraseFifoMessage(MessageViewImpl messageView, ConsumeResult consumeResult) {
    statsConsumptionResult(consumeResult);
    final RetryPolicy retryPolicy = consumer.getRetryPolicy();
    final int maxAttempts = retryPolicy.getMaxAttempts();
    int attempt = messageView.getDeliveryAttempt();
    final MessageId messageId = messageView.getMessageId();
    final ConsumeService service = consumer.getConsumeService();
    final ClientId clientId = consumer.getClientId();
    if (ConsumeResult.FAILURE.equals(consumeResult) && attempt < maxAttempts) {
        // Compute the delay from the pre-increment attempt, then bump the counter
        // so the next round sees the updated delivery attempt.
        final Duration nextAttemptDelay = retryPolicy.getNextAttemptDelay(attempt);
        attempt = messageView.incrementAndGetDeliveryAttempt();
        log.debug("Prepare to redeliver the fifo message because of the consumption failure, maxAttempt={}," +
            " attempt={}, mq={}, messageId={}, nextAttemptDelay={}, clientId={}", maxAttempts, attempt, mq,
            messageId, nextAttemptDelay, clientId);
        final ListenableFuture<ConsumeResult> future = service.consume(messageView, nextAttemptDelay);
        // Recurse on the redelivery outcome until success or attempts are exhausted.
        return Futures.transformAsync(future, result -> eraseFifoMessage(messageView, result),
            MoreExecutors.directExecutor());
    }
    boolean ok = ConsumeResult.SUCCESS.equals(consumeResult);
    if (!ok) {
        log.info("Failed to consume fifo message finally, run out of attempt times, maxAttempts={}, "
            + "attempt={}, mq={}, messageId={}, clientId={}", maxAttempts, attempt, mq, messageId, clientId);
    }
    // Ack message or forward it to DLQ depends on consumption result.
    ListenableFuture<Void> future = ok ? ackMessage(messageView) : forwardToDeadLetterQueue(messageView);
    future.addListener(() -> evictCache(messageView), consumer.getConsumptionExecutor());
    return future;
}
/**
 * Kicks off forwarding of the message to the dead-letter queue, starting at
 * attempt 1. The returned future completes when forwarding terminally settles.
 */
private ListenableFuture<Void> forwardToDeadLetterQueue(final MessageViewImpl messageView) {
    final SettableFuture<Void> resultFuture = SettableFuture.create();
    forwardToDeadLetterQueue(messageView, 1, resultFuture);
    return resultFuture;
}
/**
 * Sends one forward-to-DLQ request and completes {@code future0} on success.
 * Any non-OK status or transport failure is retried via
 * {@link #forwardToDeadLetterQueueLater} with the attempt count bumped;
 * unlike ack/nack there is no unrecoverable status short-circuit here.
 *
 * @param messageView message to forward
 * @param attempt     1-based attempt counter (logging only)
 * @param future0     completed once the message is successfully forwarded
 */
private void forwardToDeadLetterQueue(final MessageViewImpl messageView, final int attempt,
    final SettableFuture<Void> future0) {
    final RpcFuture<ForwardMessageToDeadLetterQueueRequest, ForwardMessageToDeadLetterQueueResponse> future =
        consumer.forwardMessageToDeadLetterQueue(messageView);
    final ClientId clientId = consumer.getClientId();
    final String consumerGroup = consumer.getConsumerGroup();
    final MessageId messageId = messageView.getMessageId();
    final Endpoints endpoints = messageView.getEndpoints();
    Futures.addCallback(future, new FutureCallback<ForwardMessageToDeadLetterQueueResponse>() {
        @Override
        public void onSuccess(ForwardMessageToDeadLetterQueueResponse response) {
            final String requestId = future.getContext().getRequestId();
            final Status status = response.getStatus();
            final Code code = status.getCode();
            // Log failure and retry later.
            if (!Code.OK.equals(code)) {
                log.error("Failed to forward message to dead letter queue, would attempt to re-forward later," +
                    " clientId={}, consumerGroup={}, messageId={}, attempt={}, mq={}, endpoints={}, "
                    + "requestId={}, code={}, status message={}", clientId, consumerGroup, messageId, attempt,
                    mq, endpoints, requestId, code, status.getMessage());
                forwardToDeadLetterQueueLater(messageView, 1 + attempt, future0);
                return;
            }
            // Set result if message is forwarded successfully.
            future0.setFuture(Futures.immediateVoidFuture());
            // Log retries.
            if (1 < attempt) {
                log.info("Re-forward message to dead letter queue successfully, clientId={}, consumerGroup={}, "
                    + "attempt={}, messageId={}, mq={}, endpoints={}, requestId={}", clientId, consumerGroup,
                    attempt, messageId, mq, endpoints, requestId);
                return;
            }
            log.info("Forward message to dead letter queue successfully, clientId={}, consumerGroup={}, "
                + "messageId={}, mq={}, endpoints={}, requestId={}", clientId, consumerGroup, messageId, mq,
                endpoints, requestId);
        }
        @Override
        public void onFailure(Throwable t) {
            // Log failure and retry later.
            log.error("Exception raised while forward message to DLQ, would attempt to re-forward later, " +
                "clientId={}, consumerGroup={}, attempt={}, messageId={}, mq={}", clientId, consumerGroup,
                attempt, messageId, mq, t);
            forwardToDeadLetterQueueLater(messageView, 1 + attempt, future0);
        }
    }, MoreExecutors.directExecutor());
}
/**
 * Schedules a retry of {@link #forwardToDeadLetterQueue} after the fixed
 * backoff delay. Scheduling failures while the scheduler is still running
 * (which should never happen) are logged and retried immediately with a
 * bumped attempt counter; a shut-down scheduler silently abandons the retry.
 *
 * <p>Fix: the caught throwable is now passed to {@code log.error} so the
 * scheduling failure's stack trace is no longer swallowed.
 */
private void forwardToDeadLetterQueueLater(final MessageViewImpl messageView, final int attempt,
    final SettableFuture<Void> future0) {
    final ScheduledExecutorService scheduler = consumer.getScheduler();
    try {
        scheduler.schedule(() -> forwardToDeadLetterQueue(messageView, attempt, future0),
            FORWARD_FIFO_MESSAGE_TO_DLQ_FAILURE_BACKOFF_DELAY.toNanos(), TimeUnit.NANOSECONDS);
    } catch (Throwable t) {
        if (scheduler.isShutdown()) {
            // Client is closing; dropping the retry is expected.
            return;
        }
        // Should never reach here; include the throwable so the cause is visible.
        log.error("[Bug] Failed to schedule DLQ message request, mq={}, messageId={}, clientId={}", mq,
            messageView.getMessageId(), consumer.getClientId(), t);
        forwardToDeadLetterQueueLater(messageView, 1 + attempt, future0);
    }
}
/**
 * Kicks off acknowledgement of the message, starting at attempt 1.
 * The returned future completes when the ack terminally settles.
 */
private ListenableFuture<Void> ackMessage(final MessageViewImpl messageView) {
    final SettableFuture<Void> resultFuture = SettableFuture.create();
    ackMessage(messageView, 1, resultFuture);
    return resultFuture;
}
/**
 * Sends one ack request and completes {@code future0} with its terminal outcome.
 *
 * <p>Outcome handling mirrors {@link #changeInvisibleDuration}:
 * {@code INVALID_RECEIPT_HANDLE} is unrecoverable and fails the future with a
 * {@link BadRequestException}; any other non-OK code or transport failure is
 * retried via {@link #ackMessageLater}; OK completes the future.
 *
 * @param messageView message being acknowledged
 * @param attempt     1-based attempt counter (logging only)
 * @param future0     completed when the ack terminally succeeds or fails
 */
private void ackMessage(final MessageViewImpl messageView, final int attempt, final SettableFuture<Void> future0) {
    final ClientId clientId = consumer.getClientId();
    final String consumerGroup = consumer.getConsumerGroup();
    final MessageId messageId = messageView.getMessageId();
    final Endpoints endpoints = messageView.getEndpoints();
    final RpcFuture<AckMessageRequest, AckMessageResponse> future =
        consumer.ackMessage(messageView);
    Futures.addCallback(future, new FutureCallback<AckMessageResponse>() {
        @Override
        public void onSuccess(AckMessageResponse response) {
            final String requestId = future.getContext().getRequestId();
            final Status status = response.getStatus();
            final Code code = status.getCode();
            // Invalid receipt handle can never become valid again: fail fast, no retry.
            if (Code.INVALID_RECEIPT_HANDLE.equals(code)) {
                log.error("Failed to ack message due to the invalid receipt handle, forgive to retry, "
                    + "clientId={}, consumerGroup={}, messageId={}, attempt={}, mq={}, endpoints={}, "
                    + "requestId={}, status message=[{}]", clientId, consumerGroup, messageId, attempt, mq,
                    endpoints, requestId, status.getMessage());
                future0.setException(new BadRequestException(code.getNumber(), requestId, status.getMessage()));
                return;
            }
            // Log failure and retry later.
            if (!Code.OK.equals(code)) {
                log.error("Failed to ack message, would attempt to re-ack later, clientId={}, "
                    + "consumerGroup={}, attempt={}, messageId={}, mq={}, code={}, requestId={}, endpoints={}, "
                    + "status message=[{}]", clientId, consumerGroup, attempt, messageId, mq, code, requestId,
                    endpoints, status.getMessage());
                ackMessageLater(messageView, 1 + attempt, future0);
                return;
            }
            // Set result if FIFO message is acknowledged successfully.
            future0.setFuture(Futures.immediateVoidFuture());
            // Log retries.
            if (1 < attempt) {
                log.info("Finally, ack message successfully, clientId={}, consumerGroup={}, attempt={}, "
                    + "messageId={}, mq={}, endpoints={}, requestId={}", clientId, consumerGroup, attempt,
                    messageId, mq, endpoints, requestId);
                return;
            }
            log.debug("Ack message successfully, clientId={}, consumerGroup={}, messageId={}, mq={}, "
                + "endpoints={}, requestId={}", clientId, consumerGroup, messageId, mq, endpoints, requestId);
        }
        @Override
        public void onFailure(Throwable t) {
            // Log failure and retry later.
            log.error("Exception raised while acknowledging message, clientId={}, consumerGroup={}, "
                + "would attempt to re-ack later, attempt={}, messageId={}, mq={}, endpoints={}", clientId,
                consumerGroup, attempt, messageId, mq, endpoints, t);
            ackMessageLater(messageView, 1 + attempt, future0);
        }
    }, MoreExecutors.directExecutor());
}
/**
 * Schedules a retry of {@link #ackMessage} after the fixed backoff delay.
 * Scheduling failures while the scheduler is still running (which should never
 * happen) are logged and retried immediately with a bumped attempt counter;
 * a shut-down scheduler silently abandons the retry.
 *
 * <p>Fix: the caught throwable is now passed to {@code log.error} so the
 * scheduling failure's stack trace is no longer swallowed.
 */
private void ackMessageLater(final MessageViewImpl messageView, final int attempt,
    final SettableFuture<Void> future) {
    final MessageId messageId = messageView.getMessageId();
    final ScheduledExecutorService scheduler = consumer.getScheduler();
    try {
        scheduler.schedule(() -> ackMessage(messageView, attempt, future),
            ACK_MESSAGE_FAILURE_BACKOFF_DELAY.toNanos(), TimeUnit.NANOSECONDS);
    } catch (Throwable t) {
        if (scheduler.isShutdown()) {
            // Client is closing; dropping the retry is expected.
            return;
        }
        // Should never reach here; include the throwable so the cause is visible.
        log.error("[Bug] Failed to schedule message ack request, mq={}, messageId={}, clientId={}",
            mq, messageId, consumer.getClientId(), t);
        ackMessageLater(messageView, 1 + attempt, future);
    }
}
/**
 * Returns the number of currently cached messages.
 *
 * <p>Delegates to {@link #cachedMessagesCount()} instead of duplicating the
 * read-lock protocol, so the locking discipline lives in exactly one place.
 */
@Override
public long getCachedMessageCount() {
    return cachedMessagesCount();
}
/**
 * Returns the total body size, in bytes, of currently cached messages.
 *
 * <p>Delegates to {@link #cachedMessageBytes()} instead of reading the atomic
 * counter directly, keeping the two accessors trivially consistent.
 */
@Override
public long getCachedMessageBytes() {
    return cachedMessageBytes();
}
/**
 * Emits a one-line stats snapshot for this process queue and resets the
 * per-interval reception counters. Locals are named distinctly from the
 * fields they drain to avoid shadowing.
 */
@ExcludeFromJacocoGeneratedReport
public void doStats() {
    final long polledTimes = this.receptionTimes.getAndSet(0);
    final long polledMessages = this.receivedMessagesQuantity.getAndSet(0);
    log.info("Process queue stats: clientId={}, mq={}, receptionTimes={}, receivedMessageQuantity={}, "
        + "cachedMessageCount={}, cachedMessageBytes={}", consumer.getClientId(), mq, polledTimes,
        polledMessages, this.getCachedMessageCount(), this.getCachedMessageBytes());
}
}
|
google/j2objc | 37,166 | jre_emul/android/platform/external/icu/android_icu4j/src/main/java/android/icu/impl/TimeZoneGenericNames.java | /* GENERATED SOURCE. DO NOT MODIFY. */
// © 2016 and later: Unicode, Inc. and others.
// License & terms of use: http://www.unicode.org/copyright.html#License
/*
*******************************************************************************
* Copyright (C) 2011-2016, International Business Machines Corporation and
* others. All Rights Reserved.
*******************************************************************************
*/
package android.icu.impl;
import java.io.IOException;
import java.io.ObjectInputStream;
import java.io.Serializable;
import java.lang.ref.WeakReference;
import java.text.MessageFormat;
import java.util.Collection;
import java.util.EnumSet;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.MissingResourceException;
import java.util.Set;
import java.util.concurrent.ConcurrentHashMap;
import android.icu.impl.TextTrieMap.ResultHandler;
import android.icu.text.LocaleDisplayNames;
import android.icu.text.TimeZoneFormat.TimeType;
import android.icu.text.TimeZoneNames;
import android.icu.text.TimeZoneNames.MatchInfo;
import android.icu.text.TimeZoneNames.NameType;
import android.icu.util.BasicTimeZone;
import android.icu.util.Freezable;
import android.icu.util.Output;
import android.icu.util.TimeZone;
import android.icu.util.TimeZone.SystemTimeZoneType;
import android.icu.util.TimeZoneTransition;
import android.icu.util.ULocale;
/**
 * This class interacts with TimeZoneNames and LocaleDisplayNames
 * to format and parse a time zone's generic display names.
 * It is not recommended to use this class directly; instead,
 * use android.icu.text.TimeZoneFormat.
 * @hide Only a subset of ICU is exposed in Android
 */
public class TimeZoneGenericNames implements Serializable, Freezable<TimeZoneGenericNames> {
// Note: This class implements Serializable, but we no longer serialize instances of
// TimeZoneGenericNames as of ICU 49. ICU 4.8 android.icu.text.TimeZoneFormat used to
// serialize a TimeZoneGenericNames field. Although TimeZoneFormat no longer reads that
// field, we have to keep TimeZoneGenericNames Serializable; otherwise it fails to read
// the (unused) TimeZoneGenericNames serialized data.
private static final long serialVersionUID = 2729910342063468417L;
/**
 * Generic name type enum.
 * <p>
 * Each constant lists, by name, the other types it can act as a fallback for.
 * Plain strings are used rather than enum constants because an enum
 * constructor cannot reference constants declared later in the same enum.
 */
public enum GenericNameType {
    LOCATION ("LONG", "SHORT"),
    LONG (),
    SHORT ();

    String[] _fallbackTypeOf;

    GenericNameType(String... fallbackTypeOf) {
        _fallbackTypeOf = fallbackTypeOf;
    }

    /**
     * Returns true if this type may be used as a fallback for {@code type}.
     */
    public boolean isFallbackTypeOf(GenericNameType type) {
        final String wanted = type.toString();
        for (String candidate : _fallbackTypeOf) {
            if (candidate.equals(wanted)) {
                return true;
            }
        }
        return false;
    }
}
/**
 * Format pattern enum used for composing location and partial location names.
 */
public enum Pattern {
    // Pattern such as "{0} Time", where {0} is the country or city.
    REGION_FORMAT("regionFormat", "({0})"),

    // Note: FALLBACK_REGION_FORMAT ("fallbackRegionFormat", "{1} ({0})" with
    // country {1} and city {0}) has not been used since ICU 50/CLDR 22.1.

    // Pattern such as "{1} ({0})", where {1} is the metazone and {0} is the
    // country or city.
    FALLBACK_FORMAT("fallbackFormat", "{1} ({0})");

    // Resource key under "zoneStrings/" for the localized pattern.
    String _key;
    // Pattern used when the resource lookup fails.
    String _defaultVal;

    Pattern(String resourceKey, String fallbackPattern) {
        _key = resourceKey;
        _defaultVal = fallbackPattern;
    }

    String key() {
        return _key;
    }

    String defaultValue() {
        return _defaultVal;
    }
}
// Locale this instance formats/parses for; fixed at construction.
private final ULocale _locale;
// Backing TimeZoneNames; lazily created in init() when not supplied.
private TimeZoneNames _tznames;
// Freeze flag; once set, mutating operations throw. Volatile for visibility.
private transient volatile boolean _frozen;
// Target region, resolved lazily by getTargetRegion().
private transient String _region;
// Weak ref so the (heavy) LocaleDisplayNames can be reclaimed and re-created.
private transient WeakReference<LocaleDisplayNames> _localeDisplayNamesRef;
// Lazily created formatters, indexed by Pattern.ordinal().
private transient MessageFormat[] _patternFormatters;
// Caches of computed names; empty string marks "name unavailable".
private transient ConcurrentHashMap<String, String> _genericLocationNamesMap;
private transient ConcurrentHashMap<String, String> _genericPartialLocationNamesMap;
// Trie for parsing display names back to zones; kept in sync with the maps above.
private transient TextTrieMap<NameInfo> _gnamesTrie;
private transient boolean _gnamesTrieFullyLoaded;
// Process-wide cache of frozen instances, keyed by locale base name.
// NOTE(review): not declared final although it is never reassigned — confirm.
private static Cache GENERIC_NAMES_CACHE = new Cache();
// Window size used for DST check for a zone in a metazone (about a half year)
private static final long DST_CHECK_RANGE = 184L*(24*60*60*1000);
// The two non-location generic name types handled by this class.
private static final NameType[] GENERIC_NON_LOCATION_TYPES =
    {NameType.LONG_GENERIC, NameType.SHORT_GENERIC};
/**
 * Constructs a <code>TimeZoneGenericNames</code> with the given locale
 * and the <code>TimeZoneNames</code>.
 * @param locale the locale
 * @param tznames the TimeZoneNames; may be null, in which case {@link #init()}
 *                creates one for the locale
 */
public TimeZoneGenericNames(ULocale locale, TimeZoneNames tznames) {
    _locale = locale;
    _tznames = tznames;
    init();
}
/**
 * Private method initializing the instance of <code>TimeZoneGenericName</code>.
 * This method should be called from a constructor and readObject.
 * Creates the name caches and parse trie, and pre-populates them with the
 * strings for the default time zone (the most likely zone to be queried).
 */
private void init() {
    if (_tznames == null) {
        _tznames = TimeZoneNames.getInstance(_locale);
    }
    _genericLocationNamesMap = new ConcurrentHashMap<String, String>();
    _genericPartialLocationNamesMap = new ConcurrentHashMap<String, String>();
    _gnamesTrie = new TextTrieMap<NameInfo>(true);
    _gnamesTrieFullyLoaded = false;

    // Preload zone strings for the default time zone
    TimeZone tz = TimeZone.getDefault();
    String tzCanonicalID = ZoneMeta.getCanonicalCLDRID(tz);
    if (tzCanonicalID != null) {
        loadStrings(tzCanonicalID);
    }
}
/**
 * Constructs a <code>TimeZoneGenericNames</code> with the given locale.
 * This constructor is private and called from {@link #getInstance(ULocale)},
 * which caches and freezes the instances it hands out.
 * @param locale the locale
 */
private TimeZoneGenericNames(ULocale locale) {
    this(locale, null);
}
/**
 * Factory method for <code>TimeZoneGenericNames</code>. Returns a frozen,
 * cached instance keyed by the locale's base name.
 *
 * @param locale the locale
 * @return a frozen <code>TimeZoneGenericNames</code>
 */
public static TimeZoneGenericNames getInstance(ULocale locale) {
    return GENERIC_NAMES_CACHE.getInstance(locale.getBaseName(), locale);
}
/**
 * Returns the display name of the time zone for the given name type
 * at the given date, or null if the display name is not available.
 *
 * <p>LOCATION resolves straight to the generic location name; LONG/SHORT try
 * the non-location name first and fall back to the location name.
 *
 * @param tz the time zone
 * @param type the generic name type - see {@link GenericNameType}
 * @param date the date
 * @return the display name, or null
 */
public String getDisplayName(TimeZone tz, GenericNameType type, long date) {
    if (type == GenericNameType.LOCATION) {
        final String canonicalID = ZoneMeta.getCanonicalCLDRID(tz);
        return (canonicalID == null) ? null : getGenericLocationName(canonicalID);
    }
    if (type == GenericNameType.LONG || type == GenericNameType.SHORT) {
        final String nonLocation = formatGenericNonLocationName(tz, type, date);
        if (nonLocation != null) {
            return nonLocation;
        }
        // Fall back to the generic location name.
        final String canonicalID = ZoneMeta.getCanonicalCLDRID(tz);
        return (canonicalID == null) ? null : getGenericLocationName(canonicalID);
    }
    return null;
}
/**
 * Returns the generic location name for the given canonical time zone ID,
 * e.g. "France Time" or "Los Angeles Time", or null when unavailable.
 * Results are memoized in {@code _genericLocationNamesMap}; an empty string
 * in the map marks a previously-computed "unavailable" result.
 *
 * @param canonicalTzID the canonical time zone ID
 * @return the generic location name, or null
 */
public String getGenericLocationName(String canonicalTzID) {
    if (canonicalTzID == null || canonicalTzID.length() == 0) {
        return null;
    }
    String name = _genericLocationNamesMap.get(canonicalTzID);
    if (name != null) {
        if (name.length() == 0) {
            // empty string to indicate the name is not available
            return null;
        }
        return name;
    }

    Output<Boolean> isPrimary = new Output<Boolean>();
    String countryCode = ZoneMeta.getCanonicalCountry(canonicalTzID, isPrimary);
    if (countryCode != null) {
        if (isPrimary.value) {
            // If this is only the single zone in the country, use the country name
            String country = getLocaleDisplayNames().regionDisplayName(countryCode);
            name = formatPattern(Pattern.REGION_FORMAT, country);
        } else {
            // If there are multiple zones including this in the country,
            // use the exemplar city name
            // getExemplarLocationName should return non-empty String
            // if the time zone is associated with a location
            String city = _tznames.getExemplarLocationName(canonicalTzID);
            name = formatPattern(Pattern.REGION_FORMAT, city);
        }
    }
    if (name == null) {
        _genericLocationNamesMap.putIfAbsent(canonicalTzID.intern(), "");
    } else {
        synchronized (this) { // we have to sync the name map and the trie
            canonicalTzID = canonicalTzID.intern();
            // putIfAbsent returning null means we won the race and must also
            // publish the name into the parse trie; otherwise use the winner's name.
            String tmp = _genericLocationNamesMap.putIfAbsent(canonicalTzID, name.intern());
            if (tmp == null) {
                // Also put the name into the trie
                NameInfo info = new NameInfo(canonicalTzID, GenericNameType.LOCATION);
                _gnamesTrie.put(name, info);
            } else {
                name = tmp;
            }
        }
    }
    return name;
}
/**
 * Sets the pattern string for the pattern type.
 * Note: This method is designed for CLDR ST - not for common use.
 * @param patType the pattern type
 * @param patStr the pattern string
 * @return this object.
 * @throws UnsupportedOperationException if this instance is frozen
 */
public TimeZoneGenericNames setFormatPattern(Pattern patType, String patStr) {
    if (isFrozen()) {
        throw new UnsupportedOperationException("Attempt to modify frozen object");
    }

    // Changing the pattern invalidates all cached names,
    // so drop the name maps and the parse trie.
    if (!_genericLocationNamesMap.isEmpty()) {
        _genericLocationNamesMap = new ConcurrentHashMap<String, String>();
    }
    if (!_genericPartialLocationNamesMap.isEmpty()) {
        _genericPartialLocationNamesMap = new ConcurrentHashMap<String, String>();
    }
    // Trie is rebuilt lazily on demand.
    _gnamesTrie = null;
    _gnamesTrieFullyLoaded = false;

    if (_patternFormatters == null) {
        _patternFormatters = new MessageFormat[Pattern.values().length];
    }
    _patternFormatters[patType.ordinal()] = new MessageFormat(patStr);
    return this;
}
/**
 * Private method to get a generic string, with fallback logics involved,
 * that is,
 *
 * 1. If a generic non-location string is available for the zone, return it.
 * 2. If a generic non-location string is associated with a meta zone and
 * the zone never use daylight time around the given date, use the standard
 * string (if available).
 * 3. If a generic non-location string is associated with a meta zone and
 * the offset at the given time is different from the preferred zone for the
 * current locale, then return the generic partial location string (if available)
 * 4. If a generic non-location string is not available, use generic location
 * string.
 *
 * @param tz the requested time zone
 * @param date the date
 * @param type the generic name type, either LONG or SHORT
 * @return the name used for a generic name type, which could be the
 * generic name, or the standard name (if the zone does not observes DST
 * around the date), or the partial location name.
 */
private String formatGenericNonLocationName(TimeZone tz, GenericNameType type, long date) {
    assert(type == GenericNameType.LONG || type == GenericNameType.SHORT);
    String tzID = ZoneMeta.getCanonicalCLDRID(tz);

    if (tzID == null) {
        return null;
    }

    // Try to get a name from time zone first
    NameType nameType = (type == GenericNameType.LONG) ? NameType.LONG_GENERIC : NameType.SHORT_GENERIC;
    String name = _tznames.getTimeZoneDisplayName(tzID, nameType);

    if (name != null) {
        return name;
    }

    // Try meta zone
    String mzID = _tznames.getMetaZoneID(tzID, date);
    if (mzID != null) {
        boolean useStandard = false;
        int[] offsets = {0, 0};
        tz.getOffset(date, false, offsets);

        if (offsets[1] == 0) {
            // Currently on standard time; only use the standard name if the zone
            // does not observe DST within ~half a year either side of 'date'.
            useStandard = true;
            // Check if the zone actually uses daylight saving time around the time
            if (tz instanceof BasicTimeZone) {
                BasicTimeZone btz = (BasicTimeZone)tz;
                TimeZoneTransition before = btz.getPreviousTransition(date, true);
                if (before != null
                        && (date - before.getTime() < DST_CHECK_RANGE)
                        && before.getFrom().getDSTSavings() != 0) {
                    useStandard = false;
                } else {
                    TimeZoneTransition after = btz.getNextTransition(date, false);
                    if (after != null
                            && (after.getTime() - date < DST_CHECK_RANGE)
                            && after.getTo().getDSTSavings() != 0) {
                        useStandard = false;
                    }
                }
            } else {
                // If not BasicTimeZone... only if the instance is not an ICU's implementation.
                // We may get a wrong answer in edge case, but it should practically work OK.
                // Probe a point half a year before and after instead of walking transitions.
                int[] tmpOffsets = new int[2];
                tz.getOffset(date - DST_CHECK_RANGE, false, tmpOffsets);
                if (tmpOffsets[1] != 0) {
                    useStandard = false;
                } else {
                    tz.getOffset(date + DST_CHECK_RANGE, false, tmpOffsets);
                    if (tmpOffsets[1] != 0){
                        useStandard = false;
                    }
                }
            }
        }
        if (useStandard) {
            NameType stdNameType = (nameType == NameType.LONG_GENERIC) ?
                    NameType.LONG_STANDARD : NameType.SHORT_STANDARD;
            String stdName = _tznames.getDisplayName(tzID, stdNameType, date);
            if (stdName != null) {
                name = stdName;

                // TODO: revisit this issue later
                // In CLDR, a same display name is used for both generic and standard
                // for some meta zones in some locales. This looks like a data bug.
                // For now, we check if the standard name is different from its generic
                // name below.
                String mzGenericName = _tznames.getMetaZoneDisplayName(mzID, nameType);
                if (stdName.equalsIgnoreCase(mzGenericName)) {
                    name = null;
                }
            }
        }

        if (name == null) {
            // Get a name from meta zone
            String mzName = _tznames.getMetaZoneDisplayName(mzID, nameType);
            if (mzName != null) {
                // Check if we need to use a partial location format.
                // This check is done by comparing offset with the meta zone's
                // golden zone at the given date.
                String goldenID = _tznames.getReferenceZoneID(mzID, getTargetRegion());
                if (goldenID != null && !goldenID.equals(tzID)) {
                    TimeZone goldenZone = TimeZone.getFrozenTimeZone(goldenID);
                    int[] offsets1 = {0, 0};

                    // Check offset in the golden zone with wall time.
                    // With getOffset(date, false, offsets1),
                    // you may get incorrect results because of time overlap at DST->STD
                    // transition.
                    goldenZone.getOffset(date + offsets[0] + offsets[1], true, offsets1);

                    if (offsets[0] != offsets1[0] || offsets[1] != offsets1[1]) {
                        // Now we need to use a partial location format.
                        name = getPartialLocationName(tzID, mzID, (nameType == NameType.LONG_GENERIC), mzName);
                    } else {
                        name = mzName;
                    }
                } else {
                    name = mzName;
                }
            }
        }
    }
    return name;
}
/**
 * Private simple pattern formatter used for formatting generic location names
 * and partial location names. We intentionally use JDK MessageFormat
 * for performance reason.
 *
 * <p>Formatters are created lazily per {@link Pattern} from the locale's
 * zoneStrings resource, falling back to the pattern's built-in default when
 * the resource is missing. Synchronized because MessageFormat is not
 * thread-safe and the formatter array is lazily populated.
 *
 * @param pat the message pattern enum
 * @param args the format argument(s)
 * @return the formatted string
 */
private synchronized String formatPattern(Pattern pat, String... args) {
    if (_patternFormatters == null) {
        _patternFormatters = new MessageFormat[Pattern.values().length];
    }

    int idx = pat.ordinal();
    if (_patternFormatters[idx] == null) {
        String patText;
        try {
            ICUResourceBundle bundle = (ICUResourceBundle) ICUResourceBundle.getBundleInstance(
                    ICUData.ICU_ZONE_BASE_NAME, _locale);
            patText = bundle.getStringWithFallback("zoneStrings/" + pat.key());
        } catch (MissingResourceException e) {
            // No localized pattern; fall back to the hard-coded default.
            patText = pat.defaultValue();
        }
        _patternFormatters[idx] = new MessageFormat(patText);
    }
    return _patternFormatters[idx].format(args);
}
/**
 * Returns the LocaleDisplayNames for this instance's locale, creating it
 * lazily (it is only needed for generic location and partial location
 * formats) and holding it through a weak reference so it can be reclaimed.
 */
private synchronized LocaleDisplayNames getLocaleDisplayNames() {
    final LocaleDisplayNames cached =
        (_localeDisplayNamesRef == null) ? null : _localeDisplayNamesRef.get();
    if (cached != null) {
        return cached;
    }
    final LocaleDisplayNames fresh = LocaleDisplayNames.getInstance(_locale);
    _localeDisplayNamesRef = new WeakReference<LocaleDisplayNames>(fresh);
    return fresh;
}
/**
 * Loads all generic display names for the given canonical time zone ID
 * into the local name trie: the generic location name, plus any partial
 * location names for meta zones where this zone is not the golden zone.
 *
 * @param tzCanonicalID the canonical time zone ID; ignored when null or empty
 */
private synchronized void loadStrings(String tzCanonicalID) {
    // Idiomatic emptiness check (was tzCanonicalID.length() == 0).
    if (tzCanonicalID == null || tzCanonicalID.isEmpty()) {
        return;
    }
    // getGenericLocationName() formats a name and put it into the trie
    getGenericLocationName(tzCanonicalID);
    // Generic partial location format
    Set<String> mzIDs = _tznames.getAvailableMetaZoneIDs(tzCanonicalID);
    for (String mzID : mzIDs) {
        // if this time zone is not the golden zone of the meta zone,
        // partial location name (such as "PT (Los Angeles)") might be
        // available.
        String goldenID = _tznames.getReferenceZoneID(mzID, getTargetRegion());
        if (!tzCanonicalID.equals(goldenID)) {
            for (NameType genNonLocType : GENERIC_NON_LOCATION_TYPES) {
                String mzGenName = _tznames.getMetaZoneDisplayName(mzID, genNonLocType);
                if (mzGenName != null) {
                    // getPartialLocationName() formats a name and put it into the trie
                    getPartialLocationName(tzCanonicalID, mzID, (genNonLocType == NameType.LONG_GENERIC), mzGenName);
                }
            }
        }
    }
}
/**
 * Private method returning the target region. The target region is derived
 * from the locale of this instance: the locale's own country code when
 * present, otherwise the country obtained after adding likely subtags,
 * falling back to the world region "001". The result is cached in
 * <code>_region</code>. When a generic name is coming from a meta zone,
 * this region is used for checking if the time zone is a reference zone
 * of the meta zone.
 *
 * @return the target region
 */
private synchronized String getTargetRegion() {
    if (_region != null) {
        return _region;
    }
    String region = _locale.getCountry();
    if (region.length() == 0) {
        region = ULocale.addLikelySubtags(_locale).getCountry();
        if (region.length() == 0) {
            // Neither the locale nor its likely-subtags expansion carries
            // a country code - use the world region.
            region = "001";
        }
    }
    _region = region;
    return _region;
}
/**
 * Private method for formatting partial location names. This format
 * is used when a generic name of a meta zone is available, but the given
 * time zone is not a reference zone (golden zone) of the meta zone.
 * Results are cached in <code>_genericPartialLocationNamesMap</code> and
 * also registered in the local name trie for parsing.
 *
 * @param tzID the canonical time zone ID
 * @param mzID the meta zone ID
 * @param isLong true when long generic name
 * @param mzDisplayName the meta zone generic display name
 * @return the partial location format string
 */
private String getPartialLocationName(String tzID, String mzID, boolean isLong, String mzDisplayName) {
    // Cache key encodes zone, meta zone and length variant, e.g. "tz&mz#L".
    String letter = isLong ? "L" : "S";
    String key = tzID + "&" + mzID + "#" + letter;
    String name = _genericPartialLocationNamesMap.get(key);
    if (name != null) {
        return name;
    }
    String location = null;
    String countryCode = ZoneMeta.getCanonicalCountry(tzID);
    if (countryCode != null) {
        // Is this the golden zone for the region?
        String regionalGolden = _tznames.getReferenceZoneID(mzID, countryCode);
        if (tzID.equals(regionalGolden)) {
            // Use country name
            location = getLocaleDisplayNames().regionDisplayName(countryCode);
        } else {
            // Otherwise, use exemplar city name
            location = _tznames.getExemplarLocationName(tzID);
        }
    } else {
        location = _tznames.getExemplarLocationName(tzID);
        if (location == null) {
            // This could happen when the time zone is not associated with a country,
            // and its ID is not hierarchical, for example, CST6CDT.
            // We use the canonical ID itself as the location for this case.
            location = tzID;
        }
    }
    name = formatPattern(Pattern.FALLBACK_FORMAT, location, mzDisplayName);
    synchronized (this) { // we have to sync the name map and the trie
        // putIfAbsent returns the previous value; a non-null result means
        // another thread won the race and its name must be used instead.
        String tmp = _genericPartialLocationNamesMap.putIfAbsent(key.intern(), name.intern());
        if (tmp == null) {
            NameInfo info = new NameInfo(tzID.intern(),
                isLong ? GenericNameType.LONG : GenericNameType.SHORT);
            _gnamesTrie.put(name, info);
        } else {
            name = tmp;
        }
    }
    return name;
}
/**
 * A private class used for storing the name information in the local trie.
 */
private static class NameInfo {
    // Canonical time zone ID the stored name resolves to.
    final String tzID;
    // Whether the stored name is a long or short generic name.
    final GenericNameType type;
    NameInfo(String tzID, GenericNameType type) {
        this.tzID = tzID;
        this.type = type;
    }
}
/**
 * A class used for returning the name search result used by
 * {@link TimeZoneGenericNames#find(String, int, EnumSet)}.
 */
public static class GenericMatchInfo {
    final GenericNameType nameType;
    final String tzID;
    final int matchLength;
    final TimeType timeType;
    // Convenience constructor for matches with no specific time type.
    private GenericMatchInfo(GenericNameType nameType, String tzID, int matchLength) {
        this(nameType, tzID, matchLength, TimeType.UNKNOWN);
    }
    private GenericMatchInfo(GenericNameType nameType, String tzID, int matchLength, TimeType timeType) {
        this.nameType = nameType;
        this.tzID = tzID;
        this.matchLength = matchLength;
        this.timeType = timeType;
    }
    /** Returns the generic name type of the match. */
    public GenericNameType nameType() {
        return nameType;
    }
    /** Returns the matched canonical time zone ID. */
    public String tzID() {
        return tzID;
    }
    /** Returns the time type (standard/daylight/unknown) of the match. */
    public TimeType timeType() {
        return timeType;
    }
    /** Returns the length of the matched text in the input. */
    public int matchLength() {
        return matchLength;
    }
}
/**
 * A private class implementing the search callback interface in
 * <code>TextTrieMap</code> for collecting match results.
 */
private static class GenericNameSearchHandler implements ResultHandler<NameInfo> {
    // Name types to accept; null means accept every type.
    private EnumSet<GenericNameType> _types;
    // Collected matches; lazily allocated on first accepted match.
    private Collection<GenericMatchInfo> _matches;
    // Longest match length seen so far, 0 when nothing matched.
    private int _maxMatchLen;
    GenericNameSearchHandler(EnumSet<GenericNameType> types) {
        _types = types;
    }
    /* (non-Javadoc)
     * @see android.icu.impl.TextTrieMap.ResultHandler#handlePrefixMatch(int, java.util.Iterator)
     */
    @Override
    public boolean handlePrefixMatch(int matchLength, Iterator<NameInfo> values) {
        while (values.hasNext()) {
            NameInfo info = values.next();
            // Skip entries whose type is not requested.
            if (_types != null && !_types.contains(info.type)) {
                continue;
            }
            GenericMatchInfo matchInfo = new GenericMatchInfo(info.type, info.tzID, matchLength);
            if (_matches == null) {
                _matches = new LinkedList<GenericMatchInfo>();
            }
            _matches.add(matchInfo);
            if (matchLength > _maxMatchLen) {
                _maxMatchLen = matchLength;
            }
        }
        // Always continue the trie traversal to find longer matches.
        return true;
    }
    /**
     * Returns the match results
     * @return the match results
     */
    public Collection<GenericMatchInfo> getMatches() {
        return _matches;
    }
    /**
     * Returns the maximum match length, or 0 if no match was found
     * @return the maximum match length
     */
    public int getMaxMatchLen() {
        return _maxMatchLen;
    }
    /**
     * Resets the match results
     */
    public void resetResults() {
        _matches = null;
        _maxMatchLen = 0;
    }
}
/**
 * Returns the best match of time zone display name for the specified types in the
 * given text at the given offset. Candidates from the underlying
 * TimeZoneNames and from the local trie (location / partial location names)
 * are both considered; the longest match wins, with the tie-breaking quirks
 * described in the inline TODOs below.
 * @param text the text
 * @param start the start offset in the text
 * @param genericTypes the set of name types.
 * @return the best matching name info.
 * @throws IllegalArgumentException when text is null/empty or start is out of range
 */
public GenericMatchInfo findBestMatch(String text, int start, EnumSet<GenericNameType> genericTypes) {
    if (text == null || text.length() == 0 || start < 0 || start >= text.length()) {
        throw new IllegalArgumentException("bad input text or range");
    }
    GenericMatchInfo bestMatch = null;
    // Find matches in the TimeZoneNames first
    Collection<MatchInfo> tznamesMatches = findTimeZoneNames(text, start, genericTypes);
    if (tznamesMatches != null) {
        MatchInfo longestMatch = null;
        for (MatchInfo match : tznamesMatches) {
            if (longestMatch == null || match.matchLength() > longestMatch.matchLength()) {
                longestMatch = match;
            }
        }
        if (longestMatch != null) {
            bestMatch = createGenericMatchInfo(longestMatch);
            if (bestMatch.matchLength() == (text.length() - start)) {
                // Full match
                //return bestMatch;
                // TODO Some time zone uses a same name for the long standard name
                // and the location name. When the match is a long standard name,
                // then we need to check if the name is same with the location name.
                // This is probably a data error or a design bug.
                // if (bestMatch.nameType != GenericNameType.LONG || bestMatch.timeType != TimeType.STANDARD) {
                //     return bestMatch;
                // }
                // TODO The deprecation of commonlyUsed flag introduced the name
                // conflict not only for long standard names, but short standard names too.
                // These short names (found in zh_Hant) should be gone once we clean
                // up CLDR time zone display name data. Once the short name conflict
                // problem (with location name) is resolved, we should change the condition
                // below back to the original one above. -Yoshito (2011-09-14)
                if (bestMatch.timeType != TimeType.STANDARD) {
                    return bestMatch;
                }
            }
        }
    }
    // Find matches in the local trie
    Collection<GenericMatchInfo> localMatches = findLocal(text, start, genericTypes);
    if (localMatches != null) {
        for (GenericMatchInfo match : localMatches) {
            // TODO See the above TODO. We use match.matchLength() >= bestMatch.matcheLength()
            // for the reason described above.
            //if (bestMatch == null || match.matchLength() > bestMatch.matchLength()) {
            if (bestMatch == null || match.matchLength() >= bestMatch.matchLength()) {
                bestMatch = match;
            }
        }
    }
    return bestMatch;
}
/**
 * Returns a collection of time zone display name matches for the specified types in the
 * given text at the given offset. Matches from the local trie (location and
 * partial location names) and from the underlying TimeZoneNames are combined
 * into a single collection.
 * @param text the text
 * @param start the start offset in the text
 * @param genericTypes the set of name types.
 * @return A collection of match info.
 * @throws IllegalArgumentException when text is null/empty or start is out of range
 */
public Collection<GenericMatchInfo> find(String text, int start, EnumSet<GenericNameType> genericTypes) {
    if (text == null || text.isEmpty() || start < 0 || start >= text.length()) {
        throw new IllegalArgumentException("bad input text or range");
    }
    // Matches from the local trie first.
    Collection<GenericMatchInfo> results = findLocal(text, start, genericTypes);
    // Then transform and append matches found in the TimeZoneNames.
    Collection<MatchInfo> tznamesMatches = findTimeZoneNames(text, start, genericTypes);
    if (tznamesMatches != null) {
        if (results == null) {
            results = new LinkedList<GenericMatchInfo>();
        }
        for (MatchInfo match : tznamesMatches) {
            results.add(createGenericMatchInfo(match));
        }
    }
    return results;
}
/**
 * Returns a <code>GenericMatchInfo</code> for the given <code>MatchInfo</code>.
 * The specific TimeZoneNames name type is collapsed into LONG/SHORT plus a
 * TimeType, and a meta-zone-only match is resolved to the reference zone of
 * the target region.
 * @param matchInfo the MatchInfo
 * @return A GenericMatchInfo
 * @throws IllegalArgumentException when the MatchInfo carries an unexpected name type
 */
private GenericMatchInfo createGenericMatchInfo(MatchInfo matchInfo) {
    GenericNameType nameType = null;
    TimeType timeType = TimeType.UNKNOWN;
    switch (matchInfo.nameType()) {
    case LONG_STANDARD:
        nameType = GenericNameType.LONG;
        timeType = TimeType.STANDARD;
        break;
    case LONG_GENERIC:
        nameType = GenericNameType.LONG;
        break;
    case SHORT_STANDARD:
        nameType = GenericNameType.SHORT;
        timeType = TimeType.STANDARD;
        break;
    case SHORT_GENERIC:
        nameType = GenericNameType.SHORT;
        break;
    default:
        throw new IllegalArgumentException("Unexpected MatchInfo name type - " + matchInfo.nameType());
    }
    String tzID = matchInfo.tzID();
    if (tzID == null) {
        // The match was keyed by meta zone; map it to the golden zone
        // for the target region.
        String mzID = matchInfo.mzID();
        assert(mzID != null);
        tzID = _tznames.getReferenceZoneID(mzID, getTargetRegion());
    }
    assert(tzID != null);
    GenericMatchInfo gmatch = new GenericMatchInfo(nameType, tzID, matchInfo.matchLength(), timeType);
    return gmatch;
}
/**
 * Returns a collection of time zone display name matches for the specified types in the
 * given text at the given offset. This method only finds matches from the TimeZoneNames
 * used by this object. Each requested generic type is expanded to its
 * generic and standard TimeZoneNames name types before searching.
 * @param text the text
 * @param start the start offset in the text
 * @param types the set of name types.
 * @return A collection of match info, or null when no applicable name type was requested.
 */
private Collection<MatchInfo> findTimeZoneNames(String text, int start, EnumSet<GenericNameType> types) {
    // Map the requested generic types to the concrete TimeZoneNames types.
    EnumSet<NameType> nameTypes = EnumSet.noneOf(NameType.class);
    if (types.contains(GenericNameType.LONG)) {
        nameTypes.add(NameType.LONG_GENERIC);
        nameTypes.add(NameType.LONG_STANDARD);
    }
    if (types.contains(GenericNameType.SHORT)) {
        nameTypes.add(NameType.SHORT_GENERIC);
        nameTypes.add(NameType.SHORT_STANDARD);
    }
    if (nameTypes.isEmpty()) {
        // Nothing searchable in TimeZoneNames for the requested types.
        return null;
    }
    return _tznames.find(text, start, nameTypes);
}
/**
 * Returns a collection of time zone display name matches for the specified types in the
 * given text at the given offset. This method only finds matches from the local trie,
 * that contains 1) generic location names and 2) long/short generic partial location names,
 * used by this object. When the first pass does not yield a full-length
 * match and the trie is not yet fully populated, names for all canonical
 * zones are loaded (expensive) and the search is retried.
 * @param text the text
 * @param start the start offset in the text
 * @param types the set of name types.
 * @return A collection of match info.
 */
private synchronized Collection<GenericMatchInfo> findLocal(String text, int start, EnumSet<GenericNameType> types) {
    GenericNameSearchHandler handler = new GenericNameSearchHandler(types);
    _gnamesTrie.find(text, start, handler);
    if (handler.getMaxMatchLen() == (text.length() - start) || _gnamesTrieFullyLoaded) {
        // perfect match
        return handler.getMatches();
    }
    // All names are not yet loaded into the local trie.
    // Load all available names into the trie. This could be very heavy.
    Set<String> tzIDs = TimeZone.getAvailableIDs(SystemTimeZoneType.CANONICAL, null, null);
    for (String tzID : tzIDs) {
        loadStrings(tzID);
    }
    _gnamesTrieFullyLoaded = true;
    // now, try it again
    handler.resetResults();
    _gnamesTrie.find(text, start, handler);
    return handler.getMatches();
}
/**
 * <code>TimeZoneGenericNames</code> cache implementation.
 * Instances are created per locale and frozen before caching, so cached
 * objects are safely shared.
 */
private static class Cache extends SoftCache<String, TimeZoneGenericNames, ULocale> {
    /* (non-Javadoc)
     * @see android.icu.impl.CacheBase#createInstance(java.lang.Object, java.lang.Object)
     */
    @Override
    protected TimeZoneGenericNames createInstance(String key, ULocale data) {
        // Freeze before publishing to the cache to guarantee immutability.
        return new TimeZoneGenericNames(data).freeze();
    }
}
/*
 * The custom deserialization method.
 * This implementation only read locale used by the object.
 * All other (transient) state is rebuilt by init() after deserialization.
 */
private void readObject(ObjectInputStream in) throws IOException, ClassNotFoundException {
    in.defaultReadObject();
    init();
}
/**
 * {@inheritDoc}
 */
@Override
public boolean isFrozen() {
    // Set once by freeze(); never cleared on this instance.
    return _frozen;
}
/**
 * {@inheritDoc}
 */
@Override
public TimeZoneGenericNames freeze() {
    // One-way operation; use cloneAsThawed() to obtain a mutable copy.
    _frozen = true;
    return this;
}
/**
 * {@inheritDoc}
 */
@Override
public TimeZoneGenericNames cloneAsThawed() {
    TimeZoneGenericNames copy = null;
    try {
        copy = (TimeZoneGenericNames)super.clone();
        copy._frozen = false;
    } catch (Throwable t) {
        // This should never happen
        // NOTE(review): a clone failure is silently swallowed here and null
        // is returned instead - confirm callers tolerate a null result.
    }
    return copy;
}
}
|
apache/flink-kubernetes-operator | 37,314 | flink-autoscaler/src/test/java/org/apache/flink/autoscaler/ScalingMetricEvaluatorTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.autoscaler;
import org.apache.flink.api.java.tuple.Tuple2;
import org.apache.flink.autoscaler.config.AutoScalerOptions;
import org.apache.flink.autoscaler.metrics.CollectedMetricHistory;
import org.apache.flink.autoscaler.metrics.CollectedMetrics;
import org.apache.flink.autoscaler.metrics.EvaluatedScalingMetric;
import org.apache.flink.autoscaler.metrics.MetricAggregator;
import org.apache.flink.autoscaler.metrics.ScalingMetric;
import org.apache.flink.autoscaler.topology.JobTopology;
import org.apache.flink.autoscaler.topology.VertexInfo;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.runtime.jobgraph.JobVertexID;
import org.junit.jupiter.api.Test;
import java.time.Duration;
import java.time.Instant;
import java.util.Collections;
import java.util.HashMap;
import java.util.Map;
import java.util.TreeMap;
import static org.apache.flink.autoscaler.config.AutoScalerOptions.CATCH_UP_DURATION;
import static org.apache.flink.autoscaler.config.AutoScalerOptions.PREFER_TRACKED_RESTART_TIME;
import static org.apache.flink.autoscaler.config.AutoScalerOptions.RESTART_TIME;
import static org.apache.flink.autoscaler.config.AutoScalerOptions.UTILIZATION_TARGET;
import static org.apache.flink.autoscaler.topology.ShipStrategy.REBALANCE;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertTrue;
/** Scaling evaluator test. */
public class ScalingMetricEvaluatorTest {
// Evaluator instance exercised by the tests below.
private ScalingMetricEvaluator evaluator = new ScalingMetricEvaluator();
/**
 * Verifies lag-based evaluation for a source -> sink topology: target data
 * rates derived from record counters and lag growth, catch-up rates scaled
 * by the configured catch-up duration, and the lag-based behavior when the
 * catch-up duration is zero or the lag itself is zero.
 */
@Test
public void testLagBasedSourceScaling() {
    var source = new JobVertexID();
    var sink = new JobVertexID();
    var topology =
            new JobTopology(
                    new VertexInfo(source, Collections.emptyMap(), 1, 1, null),
                    new VertexInfo(sink, Map.of(source, REBALANCE), 1, 1, null));
    var metricHistory = new TreeMap<Instant, CollectedMetrics>();
    // First observation: baseline counters (lag 950, zero records in/out).
    metricHistory.put(
            Instant.ofEpochMilli(1000),
            new CollectedMetrics(
                    Map.of(
                            source,
                            Map.of(
                                    ScalingMetric.LAG,
                                    950.,
                                    ScalingMetric.NUM_RECORDS_IN,
                                    0.,
                                    ScalingMetric.NUM_RECORDS_OUT,
                                    0.,
                                    ScalingMetric.LOAD,
                                    .8),
                            sink,
                            Map.of(ScalingMetric.NUM_RECORDS_IN, 0., ScalingMetric.LOAD, .4)),
                    Map.of()));
    // Lag 950 -> 1000
    // Input records 0 -> 100
    // -> Source Data rate = 150
    // Output ratio 2
    metricHistory.put(
            Instant.ofEpochMilli(2000),
            new CollectedMetrics(
                    Map.of(
                            source,
                            Map.of(
                                    ScalingMetric.LAG,
                                    1000.,
                                    ScalingMetric.NUM_RECORDS_IN,
                                    100.,
                                    ScalingMetric.NUM_RECORDS_OUT,
                                    200.,
                                    ScalingMetric.LOAD,
                                    .6),
                            sink,
                            Map.of(ScalingMetric.NUM_RECORDS_IN, 200., ScalingMetric.LOAD, .3)),
                    Map.of()));
    var conf = new Configuration();
    conf.set(CATCH_UP_DURATION, Duration.ofSeconds(2));
    var evaluatedMetrics =
            evaluator
                    .evaluate(
                            conf,
                            new CollectedMetricHistory(topology, metricHistory, Instant.now()),
                            Duration.ZERO)
                    .getVertexMetrics();
    // LOAD is the average of the two observations per vertex.
    assertEquals(
            EvaluatedScalingMetric.avg(.7),
            evaluatedMetrics.get(source).get(ScalingMetric.LOAD));
    assertEquals(
            EvaluatedScalingMetric.avg(.35),
            evaluatedMetrics.get(sink).get(ScalingMetric.LOAD));
    // Catch-up rate with 2s catch-up duration: lag 1000 / 2s = 500 (source),
    // doubled downstream by the output ratio of 2.
    assertEquals(
            EvaluatedScalingMetric.avg(150),
            evaluatedMetrics.get(source).get(ScalingMetric.TARGET_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.of(500.),
            evaluatedMetrics.get(source).get(ScalingMetric.CATCH_UP_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.avg(300),
            evaluatedMetrics.get(sink).get(ScalingMetric.TARGET_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.of(1000),
            evaluatedMetrics.get(sink).get(ScalingMetric.CATCH_UP_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.of(1000),
            evaluatedMetrics.get(source).get(ScalingMetric.LAG));
    // Only sources carry a LAG metric.
    assertFalse(evaluatedMetrics.get(sink).containsKey(ScalingMetric.LAG));
    // Halving the catch-up duration doubles the catch-up rates.
    conf.set(CATCH_UP_DURATION, Duration.ofSeconds(1));
    evaluatedMetrics =
            evaluator
                    .evaluate(
                            conf,
                            new CollectedMetricHistory(topology, metricHistory, Instant.now()),
                            Duration.ZERO)
                    .getVertexMetrics();
    assertEquals(
            EvaluatedScalingMetric.avg(150),
            evaluatedMetrics.get(source).get(ScalingMetric.TARGET_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.of(1000),
            evaluatedMetrics.get(source).get(ScalingMetric.CATCH_UP_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.avg(300),
            evaluatedMetrics.get(sink).get(ScalingMetric.TARGET_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.of(2000),
            evaluatedMetrics.get(sink).get(ScalingMetric.CATCH_UP_DATA_RATE));
    // Restart time should not affect evaluated metrics
    conf.set(RESTART_TIME, Duration.ofSeconds(2));
    evaluatedMetrics =
            evaluator
                    .evaluate(
                            conf,
                            new CollectedMetricHistory(topology, metricHistory, Instant.now()),
                            Duration.ZERO)
                    .getVertexMetrics();
    assertEquals(
            EvaluatedScalingMetric.avg(150),
            evaluatedMetrics.get(source).get(ScalingMetric.TARGET_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.of(1000),
            evaluatedMetrics.get(source).get(ScalingMetric.CATCH_UP_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.avg(300),
            evaluatedMetrics.get(sink).get(ScalingMetric.TARGET_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.of(2000),
            evaluatedMetrics.get(sink).get(ScalingMetric.CATCH_UP_DATA_RATE));
    // Turn off lag based scaling
    conf.set(CATCH_UP_DURATION, Duration.ZERO);
    evaluatedMetrics =
            evaluator
                    .evaluate(
                            conf,
                            new CollectedMetricHistory(topology, metricHistory, Instant.now()),
                            Duration.ZERO)
                    .getVertexMetrics();
    assertEquals(
            EvaluatedScalingMetric.avg(150),
            evaluatedMetrics.get(source).get(ScalingMetric.TARGET_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.of(0),
            evaluatedMetrics.get(source).get(ScalingMetric.CATCH_UP_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.avg(300),
            evaluatedMetrics.get(sink).get(ScalingMetric.TARGET_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.of(0),
            evaluatedMetrics.get(sink).get(ScalingMetric.CATCH_UP_DATA_RATE));
    // Test 0 lag
    metricHistory.clear();
    metricHistory.put(
            Instant.ofEpochMilli(3000),
            new CollectedMetrics(
                    Map.of(
                            source,
                            Map.of(
                                    ScalingMetric.LAG,
                                    0.,
                                    ScalingMetric.NUM_RECORDS_IN,
                                    100.,
                                    ScalingMetric.NUM_RECORDS_OUT,
                                    200.,
                                    ScalingMetric.LOAD,
                                    .6),
                            sink,
                            Map.of(ScalingMetric.NUM_RECORDS_IN, 200., ScalingMetric.LOAD, .3)),
                    Map.of()));
    metricHistory.put(
            Instant.ofEpochMilli(4000),
            new CollectedMetrics(
                    Map.of(
                            source,
                            Map.of(
                                    ScalingMetric.LAG,
                                    0.,
                                    ScalingMetric.NUM_RECORDS_IN,
                                    200.,
                                    ScalingMetric.NUM_RECORDS_OUT,
                                    400.,
                                    ScalingMetric.LOAD,
                                    .6),
                            sink,
                            Map.of(ScalingMetric.NUM_RECORDS_IN, 400., ScalingMetric.LOAD, .3)),
                    Map.of()));
    conf.set(CATCH_UP_DURATION, Duration.ofMinutes(1));
    evaluatedMetrics =
            evaluator
                    .evaluate(
                            conf,
                            new CollectedMetricHistory(topology, metricHistory, Instant.now()),
                            Duration.ZERO)
                    .getVertexMetrics();
    // With zero lag, the target rate is just the observed input rate.
    assertEquals(
            EvaluatedScalingMetric.avg(100),
            evaluatedMetrics.get(source).get(ScalingMetric.TARGET_DATA_RATE));
    assertEquals(
            EvaluatedScalingMetric.avg(200),
            evaluatedMetrics.get(sink).get(ScalingMetric.TARGET_DATA_RATE));
}
/**
 * Checks the scale-up/scale-down utilization thresholds computed from the
 * target/min/max utilization settings, restart time and catch-up duration.
 * Uses the getThresholds helper defined elsewhere in this class.
 */
@Test
public void testUtilizationBoundaryComputation() {
    var conf = new Configuration();
    conf.set(UTILIZATION_TARGET, 0.8);
    conf.set(AutoScalerOptions.UTILIZATION_MAX, 0.9);
    conf.set(AutoScalerOptions.UTILIZATION_MIN, 0.7);
    conf.set(RESTART_TIME, Duration.ofSeconds(1));
    conf.set(CATCH_UP_DURATION, Duration.ZERO);
    // Default behaviour, restart time does not factor in
    assertEquals(Tuple2.of(778.0, 1000.0), getThresholds(700, 0, conf));
    conf.set(CATCH_UP_DURATION, Duration.ofSeconds(2));
    assertEquals(Tuple2.of(1128.0, 1700.0), getThresholds(700, 350, conf));
    assertEquals(Tuple2.of(778.0, 1350.0), getThresholds(700, 0, conf));
    // Test thresholds during catchup periods
    // (no upper bound while catching up -> positive infinity)
    assertEquals(
            Tuple2.of(1050., Double.POSITIVE_INFINITY), getThresholds(700, 350, conf, true));
    assertEquals(Tuple2.of(700., Double.POSITIVE_INFINITY), getThresholds(700, 0, conf, true));
}
/**
 * Same boundary computation as above, but with PREFER_TRACKED_RESTART_TIME
 * enabled: the maximum observed restart time from ScalingTracking (5 min)
 * is used instead of the configured RESTART_TIME (10 min).
 */
@Test
public void testUtilizationBoundaryComputationWithRestartTimesTracking() {
    var conf = new Configuration();
    conf.set(UTILIZATION_TARGET, 0.8);
    conf.set(AutoScalerOptions.UTILIZATION_MAX, 0.9);
    conf.set(AutoScalerOptions.UTILIZATION_MIN, 0.7);
    conf.set(RESTART_TIME, Duration.ofMinutes(10));
    conf.set(CATCH_UP_DURATION, Duration.ZERO);
    conf.set(PREFER_TRACKED_RESTART_TIME, true);
    var scalingTracking = new ScalingTracking();
    scalingTracking.addScalingRecord(
            Instant.parse("2023-11-15T16:00:00.00Z"), new ScalingRecord(Duration.ofMinutes(3)));
    scalingTracking.addScalingRecord(
            Instant.parse("2023-11-15T16:20:00.00Z"), new ScalingRecord(Duration.ofMinutes(5)));
    var restartTimeSec = scalingTracking.getMaxRestartTimeOrDefault(conf);
    // Restart time does not factor in
    assertEquals(Tuple2.of(778.0, 1000.0), getThresholds(700, 0, restartTimeSec, conf));
    conf.set(CATCH_UP_DURATION, Duration.ofMinutes(1));
    assertEquals(Tuple2.of(1128.0, 4850.0), getThresholds(700, 350, restartTimeSec, conf));
    assertEquals(Tuple2.of(778.0, 4500.0), getThresholds(700, 0, restartTimeSec, conf));
    // Test thresholds during catchup periods
    assertEquals(
            Tuple2.of(1050., Double.POSITIVE_INFINITY),
            getThresholds(700, 350, restartTimeSec, conf, true));
    assertEquals(
            Tuple2.of(700., Double.POSITIVE_INFINITY),
            getThresholds(700, 0, restartTimeSec, conf, true));
}
/**
 * Verifies isProcessingBacklog: false for zero or missing lag, true only
 * when the estimated catch-up time at the average processing rate exceeds
 * the configured backlog lag threshold. Note: later puts deliberately reuse
 * the same timestamp key to overwrite the previous observation.
 */
@Test
public void testBacklogProcessingEvaluation() {
    var source = new JobVertexID();
    var sink = new JobVertexID();
    var conf = new Configuration();
    var topology =
            new JobTopology(
                    new VertexInfo(source, Collections.emptyMap(), 1, 1),
                    new VertexInfo(sink, Map.of(source, REBALANCE), 1, 1));
    var metricHistory = new TreeMap<Instant, CollectedMetrics>();
    // 0 lag
    metricHistory.put(
            Instant.ofEpochMilli(0),
            new CollectedMetrics(
                    Map.of(
                            source,
                            Map.of(ScalingMetric.LAG, 0., ScalingMetric.NUM_RECORDS_IN, 0.),
                            sink,
                            Map.of()),
                    Map.of()));
    metricHistory.put(
            Instant.ofEpochMilli(1000),
            new CollectedMetrics(
                    Map.of(
                            source,
                            Map.of(ScalingMetric.LAG, 0., ScalingMetric.NUM_RECORDS_IN, 100.),
                            sink,
                            Map.of()),
                    Map.of()));
    assertFalse(ScalingMetricEvaluator.isProcessingBacklog(topology, metricHistory, conf));
    // Missing lag
    metricHistory.put(
            Instant.ofEpochMilli(1000),
            new CollectedMetrics(
                    Map.of(source, Map.of(ScalingMetric.NUM_RECORDS_IN, 100.), sink, Map.of()),
                    Map.of()));
    assertFalse(ScalingMetricEvaluator.isProcessingBacklog(topology, metricHistory, conf));
    // Catch up time is more than a minute at avg proc rate (200)
    metricHistory.put(
            Instant.ofEpochMilli(1000),
            new CollectedMetrics(
                    Map.of(
                            source,
                            Map.of(
                                    ScalingMetric.LAG,
                                    250.
                                            * conf.get(
                                                            AutoScalerOptions
                                                                    .BACKLOG_PROCESSING_LAG_THRESHOLD)
                                                    .toSeconds(),
                                    ScalingMetric.NUM_RECORDS_IN,
                                    200.),
                            sink,
                            Map.of()),
                    Map.of()));
    assertTrue(ScalingMetricEvaluator.isProcessingBacklog(topology, metricHistory, conf));
    // Catch up time is less than a minute at avg proc rate (200)
    metricHistory.put(
            Instant.ofEpochMilli(1000),
            new CollectedMetrics(
                    Map.of(
                            source,
                            Map.of(
                                    ScalingMetric.LAG,
                                    180.
                                            * conf.get(
                                                            AutoScalerOptions
                                                                    .BACKLOG_PROCESSING_LAG_THRESHOLD)
                                                    .toSeconds(),
                                    ScalingMetric.NUM_RECORDS_IN,
                                    200.),
                            sink,
                            Map.of()),
                    Map.of()));
    assertFalse(ScalingMetricEvaluator.isProcessingBacklog(topology, metricHistory, conf));
}
/**
 * Verifies the switch between busy-time-based TPR and observed TPR:
 * observed TPR (avg 300) is used only when it diverges from the busy-time
 * TPR (350) by more than the switch threshold AND enough observations exist.
 */
@Test
public void testObservedTprEvaluation() {
    var source = new JobVertexID();
    var conf = new Configuration();
    var metricHistory = new TreeMap<Instant, CollectedMetrics>();
    metricHistory.put(
            Instant.ofEpochMilli(1000),
            new CollectedMetrics(
                    Map.of(source, Map.of(ScalingMetric.OBSERVED_TPR, 200.)), Map.of()));
    metricHistory.put(
            Instant.ofEpochMilli(2000),
            new CollectedMetrics(
                    Map.of(source, Map.of(ScalingMetric.OBSERVED_TPR, 400.)), Map.of()));
    // Observed TPR average : 300
    // Set diff threshold to 20% -> within threshold
    conf.set(AutoScalerOptions.OBSERVED_TRUE_PROCESSING_RATE_SWITCH_THRESHOLD, 0.2);
    // Test that we used busy time based TPR
    assertEquals(
            350,
            ScalingMetricEvaluator.computeTrueProcessingRate(
                    100, 35, metricHistory, source, conf));
    // Set diff threshold to 10% -> outside threshold
    conf.set(AutoScalerOptions.OBSERVED_TRUE_PROCESSING_RATE_SWITCH_THRESHOLD, 0.1);
    // Test that we used the observed TPR
    assertEquals(
            300,
            ScalingMetricEvaluator.computeTrueProcessingRate(
                    100, 35, metricHistory, source, conf));
    // Test that observed tpr min observations are respected. If less, use busy time
    conf.set(AutoScalerOptions.OBSERVED_TRUE_PROCESSING_RATE_MIN_OBSERVATIONS, 3);
    assertEquals(
            350,
            ScalingMetricEvaluator.computeTrueProcessingRate(
                    100, 35, metricHistory, source, conf));
}
/**
 * With an empty metric history (no observed TPR), the busy-time-based TPR
 * is always used - including degenerate infinite and NaN results.
 */
@Test
public void testMissingObservedTpr() {
    var source = new JobVertexID();
    var conf = new Configuration();
    var metricHistory = new TreeMap<Instant, CollectedMetrics>();
    // Test that we used busy time based TPR even if infinity
    assertEquals(
            350.,
            ScalingMetricEvaluator.computeTrueProcessingRate(
                    100, 35, metricHistory, source, conf));
    // Zero busy time -> infinite rate.
    assertEquals(
            Double.POSITIVE_INFINITY,
            ScalingMetricEvaluator.computeTrueProcessingRate(
                    0, 100, metricHistory, source, conf));
    assertEquals(
            Double.POSITIVE_INFINITY,
            ScalingMetricEvaluator.computeTrueProcessingRate(
                    0, 0, metricHistory, source, conf));
    // NaN inputs propagate NaN.
    assertEquals(
            Double.NaN,
            ScalingMetricEvaluator.computeTrueProcessingRate(
                    Double.NaN, Double.NaN, metricHistory, source, conf));
}
/**
 * When busy time is NaN, the evaluator falls back to the average observed
 * TPR from the metric history (here (200 + 400) / 2 = 300).
 */
@Test
public void testMissingBusyTimeTpr() {
    var source = new JobVertexID();
    var metricHistory = new TreeMap<Instant, CollectedMetrics>();
    metricHistory.put(
            Instant.ofEpochMilli(1000),
            new CollectedMetrics(
                    Map.of(source, Map.of(ScalingMetric.OBSERVED_TPR, 200.)), Map.of()));
    metricHistory.put(
            Instant.ofEpochMilli(2000),
            new CollectedMetrics(
                    Map.of(source, Map.of(ScalingMetric.OBSERVED_TPR, 400.)), Map.of()));
    assertEquals(
            300.,
            ScalingMetricEvaluator.computeTrueProcessingRate(
                    Double.NaN, 1., metricHistory, source, new Configuration()));
}
/**
 * Verifies evaluateGlobalMetrics: NaN placeholders when no global metrics
 * were collected, then (current, average) pairs once observations arrive -
 * the latest value is "current" and the average spans all observations.
 */
@Test
public void testGlobalMetricEvaluation() {
    var globalMetrics = new TreeMap<Instant, CollectedMetrics>();
    globalMetrics.put(Instant.now(), new CollectedMetrics(Map.of(), Map.of()));
    // No collected global metrics -> every evaluated metric is NaN.
    assertEquals(
            Map.of(
                    ScalingMetric.HEAP_MAX_USAGE_RATIO,
                    EvaluatedScalingMetric.of(Double.NaN),
                    ScalingMetric.GC_PRESSURE,
                    EvaluatedScalingMetric.of(Double.NaN),
                    ScalingMetric.HEAP_MEMORY_USED,
                    EvaluatedScalingMetric.of(Double.NaN),
                    ScalingMetric.MANAGED_MEMORY_USED,
                    EvaluatedScalingMetric.of(Double.NaN),
                    ScalingMetric.METASPACE_MEMORY_USED,
                    EvaluatedScalingMetric.of(Double.NaN),
                    ScalingMetric.NUM_TASK_SLOTS_USED,
                    EvaluatedScalingMetric.of(Double.NaN)),
            ScalingMetricEvaluator.evaluateGlobalMetrics(globalMetrics));
    globalMetrics.put(
            Instant.now(),
            new CollectedMetrics(
                    Map.of(),
                    Map.of(
                            ScalingMetric.HEAP_MAX_USAGE_RATIO,
                            0.5,
                            ScalingMetric.GC_PRESSURE,
                            0.6,
                            ScalingMetric.HEAP_MEMORY_USED,
                            512.,
                            ScalingMetric.MANAGED_MEMORY_USED,
                            420.,
                            ScalingMetric.METASPACE_MEMORY_USED,
                            110.)));
    // Single observation -> current == average.
    assertEquals(
            Map.of(
                    ScalingMetric.HEAP_MAX_USAGE_RATIO,
                    new EvaluatedScalingMetric(0.5, 0.5),
                    ScalingMetric.GC_PRESSURE,
                    EvaluatedScalingMetric.of(0.6),
                    ScalingMetric.HEAP_MEMORY_USED,
                    new EvaluatedScalingMetric(512, 512),
                    ScalingMetric.MANAGED_MEMORY_USED,
                    new EvaluatedScalingMetric(420, 420),
                    ScalingMetric.METASPACE_MEMORY_USED,
                    new EvaluatedScalingMetric(110, 110),
                    ScalingMetric.NUM_TASK_SLOTS_USED,
                    EvaluatedScalingMetric.of(Double.NaN)),
            ScalingMetricEvaluator.evaluateGlobalMetrics(globalMetrics));
    globalMetrics.put(
            Instant.now(),
            new CollectedMetrics(
                    Map.of(),
                    Map.of(
                            ScalingMetric.HEAP_MAX_USAGE_RATIO,
                            0.7,
                            ScalingMetric.GC_PRESSURE,
                            0.8,
                            ScalingMetric.HEAP_MEMORY_USED,
                            1024.,
                            ScalingMetric.MANAGED_MEMORY_USED,
                            840.,
                            ScalingMetric.METASPACE_MEMORY_USED,
                            220.,
                            ScalingMetric.NUM_TASK_SLOTS_USED,
                            42.)));
    // Two observations -> current is the latest, average spans both
    // (e.g. heap: current 1024, avg (512 + 1024) / 2 = 768).
    assertEquals(
            Map.of(
                    ScalingMetric.HEAP_MAX_USAGE_RATIO,
                    new EvaluatedScalingMetric(0.7, 0.6),
                    ScalingMetric.GC_PRESSURE,
                    EvaluatedScalingMetric.of(0.8),
                    ScalingMetric.HEAP_MEMORY_USED,
                    new EvaluatedScalingMetric(1024., 768.),
                    ScalingMetric.MANAGED_MEMORY_USED,
                    new EvaluatedScalingMetric(840., 630.),
                    ScalingMetric.METASPACE_MEMORY_USED,
                    new EvaluatedScalingMetric(220., 165.),
                    ScalingMetric.NUM_TASK_SLOTS_USED,
                    EvaluatedScalingMetric.of(42.)),
            ScalingMetricEvaluator.evaluateGlobalMetrics(globalMetrics));
}
/**
 * Zero busy time, zero input rate, or NaN busy time with zero input rate
 * must all evaluate to an infinite true processing rate.
 */
@Test
public void testZeroValuesForRatesOrBusyness() {
    // Table-driven form of the four degenerate (busyTime, inputRate) cases.
    double[] busyTimes = {0, 0, 1, Double.NaN};
    long[] inputRates = {0, 1, 0, 0};
    for (int i = 0; i < busyTimes.length; i++) {
        assertInfiniteTpr(busyTimes[i], inputRates[i]);
    }
}
// Asserts that computeTrueProcessingRate returns positive infinity for the
// given busy time and input rate with empty history and default config.
private static void assertInfiniteTpr(double busyTime, long inputRate) {
    assertEquals(
            Double.POSITIVE_INFINITY,
            ScalingMetricEvaluator.computeTrueProcessingRate(
                    busyTime,
                    inputRate,
                    new TreeMap<>(),
                    new JobVertexID(),
                    new Configuration()));
}
/**
 * Verifies computeBusyTimeAvg for each aggregator: MAX/MIN derive the value
 * from the LOAD samples (parallelism-independent), while AVG is computed
 * from the ACCUMULATED_BUSY_TIME delta divided by elapsed time and
 * parallelism.
 */
@Test
public void testBusyTimeEvaluation() {
    var v = new JobVertexID();
    var conf = new Configuration();
    var metricHistory = new TreeMap<Instant, CollectedMetrics>();
    metricHistory.put(
            Instant.ofEpochMilli(1000),
            new CollectedMetrics(
                    Map.of(
                            v,
                            Map.of(
                                    ScalingMetric.LOAD, 0.2,
                                    ScalingMetric.ACCUMULATED_BUSY_TIME, 10000.)),
                    Map.of()));
    metricHistory.put(
            Instant.ofEpochMilli(2000),
            new CollectedMetrics(
                    Map.of(
                            v,
                            Map.of(
                                    ScalingMetric.LOAD, 0.3,
                                    ScalingMetric.ACCUMULATED_BUSY_TIME, 10200.)),
                    Map.of()));
    metricHistory.put(
            Instant.ofEpochMilli(3000),
            new CollectedMetrics(
                    Map.of(
                            v,
                            Map.of(
                                    ScalingMetric.LOAD, 0.4,
                                    ScalingMetric.ACCUMULATED_BUSY_TIME, 10400.)),
                    Map.of()));
    // With MAX or MIN we should compute from LOAD only, parallelism should not matter
    conf.set(AutoScalerOptions.BUSY_TIME_AGGREGATOR, MetricAggregator.MAX);
    assertEquals(300., ScalingMetricEvaluator.computeBusyTimeAvg(conf, metricHistory, v, 2));
    assertEquals(300., ScalingMetricEvaluator.computeBusyTimeAvg(conf, metricHistory, v, 0));
    conf.set(AutoScalerOptions.BUSY_TIME_AGGREGATOR, MetricAggregator.MIN);
    assertEquals(300., ScalingMetricEvaluator.computeBusyTimeAvg(conf, metricHistory, v, 2));
    assertEquals(300., ScalingMetricEvaluator.computeBusyTimeAvg(conf, metricHistory, v, 0));
    conf.set(AutoScalerOptions.BUSY_TIME_AGGREGATOR, MetricAggregator.AVG);
    // With AVG we compute from accumulated busy time
    // Diff 400 over 2 seconds -> 200 / second (for the whole job -> we need to divide for
    // parallelism)
    assertEquals(100., ScalingMetricEvaluator.computeBusyTimeAvg(conf, metricHistory, v, 2));
    assertEquals(200., ScalingMetricEvaluator.computeBusyTimeAvg(conf, metricHistory, v, 1));
}
    /**
     * Edge output ratios computed from the record-count deltas between two metric snapshots.
     * Per the assertions: source1 emits 200 out for 100 in (ratio 2), source2 emits 50 out for
     * 100 in (ratio 0.5), and op1 -> sink1 evaluates to 0.2 (sink1 received 50 of op1's 250
     * incoming records) — op1 itself reports no NUM_RECORDS_OUT.
     */
    @Test
    public void testComputableOutputRatios() {
        var source1 = new JobVertexID();
        var source2 = new JobVertexID();
        var op1 = new JobVertexID();
        var sink1 = new JobVertexID();
        // Diamond-ish topology: two sources feeding op1, op1 feeding sink1.
        var topology =
                new JobTopology(
                        new VertexInfo(source1, Collections.emptyMap(), 1, 1),
                        new VertexInfo(source2, Collections.emptyMap(), 1, 1),
                        new VertexInfo(op1, Map.of(source1, REBALANCE, source2, REBALANCE), 1, 1),
                        new VertexInfo(sink1, Map.of(op1, REBALANCE), 1, 1));
        var metricHistory = new TreeMap<Instant, CollectedMetrics>();
        metricHistory.put(
                Instant.ofEpochMilli(1000),
                new CollectedMetrics(
                        Map.of(
                                source1,
                                Map.of(
                                        ScalingMetric.NUM_RECORDS_IN, 100.,
                                        ScalingMetric.NUM_RECORDS_OUT, 100.),
                                source2,
                                Map.of(
                                        ScalingMetric.NUM_RECORDS_IN, 100.,
                                        ScalingMetric.NUM_RECORDS_OUT, 100.),
                                op1,
                                Map.of(ScalingMetric.NUM_RECORDS_IN, 100.),
                                sink1,
                                Map.of(ScalingMetric.NUM_RECORDS_IN, 100.)),
                        Map.of()));
        metricHistory.put(
                Instant.ofEpochMilli(2000),
                new CollectedMetrics(
                        Map.of(
                                source1,
                                Map.of(
                                        ScalingMetric.NUM_RECORDS_IN, 200.,
                                        ScalingMetric.NUM_RECORDS_OUT, 300.),
                                source2,
                                Map.of(
                                        ScalingMetric.NUM_RECORDS_IN, 200.,
                                        ScalingMetric.NUM_RECORDS_OUT, 150.),
                                op1,
                                Map.of(ScalingMetric.NUM_RECORDS_IN, 350.),
                                sink1,
                                Map.of(ScalingMetric.NUM_RECORDS_IN, 150.)),
                        Map.of()));
        assertEquals(
                2.,
                ScalingMetricEvaluator.computeEdgeOutputRatio(
                        source1, op1, topology, metricHistory));
        assertEquals(
                0.5,
                ScalingMetricEvaluator.computeEdgeOutputRatio(
                        source2, op1, topology, metricHistory));
        assertEquals(
                0.2,
                ScalingMetricEvaluator.computeEdgeOutputRatio(op1, sink1, topology, metricHistory));
    }
    /**
     * When the downstream vertices report no input metrics, the edge output ratio falls back to
     * the upstream vertex's own out/in delta (source1: 200/100 = 2, source2: 50/100 = 0.5),
     * yielding the same ratio on every outgoing edge of that source.
     */
    @Test
    public void testOutputRatioFallbackToOutPerSecond() {
        var source1 = new JobVertexID();
        var source2 = new JobVertexID();
        var op1 = new JobVertexID();
        var op2 = new JobVertexID();
        // Both operators consume from both sources; neither operator reports metrics.
        var topology =
                new JobTopology(
                        new VertexInfo(source1, Collections.emptyMap(), 1, 1),
                        new VertexInfo(source2, Collections.emptyMap(), 1, 1),
                        new VertexInfo(op1, Map.of(source1, REBALANCE, source2, REBALANCE), 1, 1),
                        new VertexInfo(op2, Map.of(source1, REBALANCE, source2, REBALANCE), 1, 1));
        var metricHistory = new TreeMap<Instant, CollectedMetrics>();
        metricHistory.put(
                Instant.ofEpochMilli(1000),
                new CollectedMetrics(
                        Map.of(
                                source1,
                                Map.of(
                                        ScalingMetric.NUM_RECORDS_IN, 0.,
                                        ScalingMetric.NUM_RECORDS_OUT, 0.),
                                source2,
                                Map.of(
                                        ScalingMetric.NUM_RECORDS_IN, 0.,
                                        ScalingMetric.NUM_RECORDS_OUT, 0.)),
                        Map.of()));
        metricHistory.put(
                Instant.ofEpochMilli(2000),
                new CollectedMetrics(
                        Map.of(
                                source1,
                                Map.of(
                                        ScalingMetric.NUM_RECORDS_IN, 100.,
                                        ScalingMetric.NUM_RECORDS_OUT, 200.),
                                source2,
                                Map.of(
                                        ScalingMetric.NUM_RECORDS_IN, 100.,
                                        ScalingMetric.NUM_RECORDS_OUT, 50.)),
                        Map.of()));
        assertEquals(
                2.,
                ScalingMetricEvaluator.computeEdgeOutputRatio(
                        source1, op1, topology, metricHistory));
        assertEquals(
                0.5,
                ScalingMetricEvaluator.computeEdgeOutputRatio(
                        source2, op1, topology, metricHistory));
        assertEquals(
                2.,
                ScalingMetricEvaluator.computeEdgeOutputRatio(
                        source1, op2, topology, metricHistory));
        assertEquals(
                0.5,
                ScalingMetricEvaluator.computeEdgeOutputRatio(
                        source2, op2, topology, metricHistory));
    }
    /**
     * {@code getRate} derives a per-second rate from the accumulated counter deltas in the
     * history (m1 for v1: 0 -> 0 -> 4 over 2s gives 2/s; m2 for v1: 10 -> 20 over 1s gives
     * 10/s) and yields NaN when the metric is missing from enough snapshots to compute a delta.
     */
    @Test
    public void getRateTest() {
        var m1 = ScalingMetric.NUM_RECORDS_IN;
        var m2 = ScalingMetric.NUM_RECORDS_OUT;
        var v1 = new JobVertexID();
        var v2 = new JobVertexID();
        var history = new TreeMap<Instant, CollectedMetrics>();
        history.put(
                Instant.ofEpochMilli(1000),
                new CollectedMetrics(Map.of(v1, Map.of(m1, 0.), v2, Map.of()), null));
        history.put(
                Instant.ofEpochMilli(2000),
                new CollectedMetrics(Map.of(v1, Map.of(m1, 0., m2, 10.), v2, Map.of()), null));
        history.put(
                Instant.ofEpochMilli(3000),
                new CollectedMetrics(
                        Map.of(v1, Map.of(m1, 4., m2, 20.), v2, Map.of(m1, 1.)), null));
        assertEquals(2, ScalingMetricEvaluator.getRate(m1, v1, history));
        assertEquals(10., ScalingMetricEvaluator.getRate(m2, v1, history));
        // v2 only has a single m1 sample and no m2 samples at all -> no rate computable.
        assertEquals(Double.NaN, ScalingMetricEvaluator.getRate(m1, v2, history));
        assertEquals(Double.NaN, ScalingMetricEvaluator.getRate(m2, v2, history));
    }
    /** Convenience overload: not catching up, restart time taken from {@code conf}. */
    private Tuple2<Double, Double> getThresholds(
            double inputTargetRate, double catchUpRate, Configuration conf) {
        return getThresholds(inputTargetRate, catchUpRate, conf, false);
    }
    /** Convenience overload: explicit restart time, not catching up. */
    private Tuple2<Double, Double> getThresholds(
            double inputTargetRate, double catchUpRate, Duration restartTime, Configuration conf) {
        return getThresholds(inputTargetRate, catchUpRate, restartTime, conf, false);
    }
    /** Convenience overload: restart time resolved from {@code AutoScalerOptions.RESTART_TIME}. */
    private Tuple2<Double, Double> getThresholds(
            double inputTargetRate, double catchUpRate, Configuration conf, boolean catchingUp) {
        var restartTime = conf.get(AutoScalerOptions.RESTART_TIME);
        return getThresholds(inputTargetRate, catchUpRate, restartTime, conf, catchingUp);
    }
    /**
     * Runs {@code computeProcessingRateThresholds} on a metric map seeded with the given target
     * and catch-up rates and returns (scale-up threshold, scale-down threshold).
     *
     * @param inputTargetRate average target data rate to seed TARGET_DATA_RATE with
     * @param catchUpRate catch-up data rate to seed CATCH_UP_DATA_RATE with
     * @param restartTime assumed job restart duration
     * @param conf autoscaler configuration
     * @param catchingUp whether the job is currently in catch-up mode
     * @return tuple of (SCALE_UP_RATE_THRESHOLD, SCALE_DOWN_RATE_THRESHOLD) current values
     */
    private Tuple2<Double, Double> getThresholds(
            double inputTargetRate,
            double catchUpRate,
            Duration restartTime,
            Configuration conf,
            boolean catchingUp) {
        var map = new HashMap<ScalingMetric, EvaluatedScalingMetric>();
        map.put(ScalingMetric.TARGET_DATA_RATE, EvaluatedScalingMetric.avg(inputTargetRate));
        map.put(ScalingMetric.CATCH_UP_DATA_RATE, EvaluatedScalingMetric.of(catchUpRate));
        ScalingMetricEvaluator.computeProcessingRateThresholds(map, conf, catchingUp, restartTime);
        return Tuple2.of(
                map.get(ScalingMetric.SCALE_UP_RATE_THRESHOLD).getCurrent(),
                map.get(ScalingMetric.SCALE_DOWN_RATE_THRESHOLD).getCurrent());
    }
}
|
apache/hadoop | 36,823 | hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/io/TestSequenceFile.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hadoop.io;
import java.io.*;
import java.util.*;
import org.apache.hadoop.fs.*;
import org.apache.hadoop.io.SequenceFile.CompressionType;
import org.apache.hadoop.io.SequenceFile.Metadata;
import org.apache.hadoop.io.compress.CompressionCodec;
import org.apache.hadoop.io.compress.DefaultCodec;
import org.apache.hadoop.io.serializer.Deserializer;
import org.apache.hadoop.io.serializer.Serialization;
import org.apache.hadoop.io.serializer.Serializer;
import org.apache.hadoop.io.serializer.avro.AvroReflectSerialization;
import org.apache.hadoop.test.GenericTestUtils;
import org.apache.hadoop.util.ReflectionUtils;
import org.apache.hadoop.conf.*;
import org.junit.jupiter.api.Test;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.Assertions.fail;
import static org.mockito.Mockito.spy;
import static org.mockito.Mockito.verify;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/** Unit tests for {@link SequenceFile} (flat files of binary key/value pairs). */
public class TestSequenceFile {
private static final Logger LOG =
LoggerFactory.getLogger(TestSequenceFile.class);
private Configuration conf = new Configuration();
  /** Runs the full write/read/sort/merge round-trip suite with the zlib {@link DefaultCodec}. */
  @Test
  public void testZlibSequenceFile() throws Exception {
    LOG.info("Testing SequenceFile with DefaultCodec");
    compressedSeqFileTest(new DefaultCodec());
    LOG.info("Successfully tested SequenceFile with DefaultCodec");
  }
@SuppressWarnings("deprecation")
public void testSorterProperties() throws IOException {
// Test to ensure that deprecated properties have no default
// references anymore.
Configuration config = new Configuration();
assertNull(config.get(CommonConfigurationKeys.IO_SORT_MB_KEY),
"The deprecated sort memory property "
+ CommonConfigurationKeys.IO_SORT_MB_KEY
+ " must not exist in any core-*.xml files.");
assertNull(config.get(CommonConfigurationKeys.IO_SORT_FACTOR_KEY),
"The deprecated sort factor property "
+ CommonConfigurationKeys.IO_SORT_FACTOR_KEY
+ " must not exist in any core-*.xml files.");
// Test deprecated property honoring
// Set different values for old and new property names
// and compare which one gets loaded
config = new Configuration();
FileSystem fs = FileSystem.get(config);
config.setInt(CommonConfigurationKeys.IO_SORT_MB_KEY, 10);
config.setInt(CommonConfigurationKeys.IO_SORT_FACTOR_KEY, 10);
config.setInt(CommonConfigurationKeys.SEQ_IO_SORT_MB_KEY, 20);
config.setInt(CommonConfigurationKeys.SEQ_IO_SORT_FACTOR_KEY, 20);
SequenceFile.Sorter sorter = new SequenceFile.Sorter(
fs, Text.class, Text.class, config);
assertEquals(10 * 1024 * 1024, sorter.getMemory(),
"Deprecated memory conf must be honored over newer property");
assertEquals(10, sorter.getFactor(),
"Deprecated factor conf must be honored over newer property");
// Test deprecated properties (graceful deprecation)
config = new Configuration();
fs = FileSystem.get(config);
config.setInt(CommonConfigurationKeys.IO_SORT_MB_KEY, 10);
config.setInt(CommonConfigurationKeys.IO_SORT_FACTOR_KEY, 10);
sorter = new SequenceFile.Sorter(
fs, Text.class, Text.class, config);
assertEquals(10 * 1024 * 1024, // In bytes
sorter.getMemory(), "Deprecated memory property "
+ CommonConfigurationKeys.IO_SORT_MB_KEY
+ " must get properly applied.");
assertEquals(10,
sorter.getFactor(), "Deprecated sort factor property "
+ CommonConfigurationKeys.IO_SORT_FACTOR_KEY
+ " must get properly applied.");
// Test regular properties (graceful deprecation)
config = new Configuration();
fs = FileSystem.get(config);
config.setInt(CommonConfigurationKeys.SEQ_IO_SORT_MB_KEY, 20);
config.setInt(CommonConfigurationKeys.SEQ_IO_SORT_FACTOR_KEY, 20);
sorter = new SequenceFile.Sorter(
fs, Text.class, Text.class, config);
assertEquals(20 * 1024 * 1024, // In bytes
sorter.getMemory(), "Memory property "
+ CommonConfigurationKeys.SEQ_IO_SORT_MB_KEY
+ " must get properly applied if present.");
assertEquals(20, sorter.getFactor(),
"Merge factor property "
+ CommonConfigurationKeys.SEQ_IO_SORT_FACTOR_KEY
+ " must get properly applied if present.");
}
  /**
   * Full round-trip suite for one codec: writes, reads, sorts (both sorter variants), checks
   * order against an in-memory reference, and merges, for NONE, RECORD and BLOCK compression.
   *
   * @param codec compression codec used for the RECORD and BLOCK compressed files
   */
  public void compressedSeqFileTest(CompressionCodec codec) throws Exception {
    int count = 1024 * 10;
    int megabytes = 1;
    int factor = 5;
    Path file = new Path(GenericTestUtils.getTempPath("test.seq"));
    Path recordCompressedFile = new Path(GenericTestUtils.getTempPath(
        "test.rc.seq"));
    Path blockCompressedFile = new Path(GenericTestUtils.getTempPath(
        "test.bc.seq"));
    // Random seed is logged so a failing run can be reproduced.
    int seed = new Random().nextInt();
    LOG.info("Seed = " + seed);
    FileSystem fs = FileSystem.getLocal(conf);
    try {
      // SequenceFile.Writer
      writeTest(fs, count, seed, file, CompressionType.NONE, null);
      readTest(fs, count, seed, file);
      sortTest(fs, count, megabytes, factor, false, file);
      checkSort(fs, count, seed, file);
      sortTest(fs, count, megabytes, factor, true, file);
      checkSort(fs, count, seed, file);
      mergeTest(fs, count, seed, file, CompressionType.NONE, false,
          factor, megabytes);
      checkSort(fs, count, seed, file);
      mergeTest(fs, count, seed, file, CompressionType.NONE, true,
          factor, megabytes);
      checkSort(fs, count, seed, file);
      // SequenceFile.RecordCompressWriter
      writeTest(fs, count, seed, recordCompressedFile, CompressionType.RECORD,
          codec);
      readTest(fs, count, seed, recordCompressedFile);
      sortTest(fs, count, megabytes, factor, false, recordCompressedFile);
      checkSort(fs, count, seed, recordCompressedFile);
      sortTest(fs, count, megabytes, factor, true, recordCompressedFile);
      checkSort(fs, count, seed, recordCompressedFile);
      mergeTest(fs, count, seed, recordCompressedFile,
          CompressionType.RECORD, false, factor, megabytes);
      checkSort(fs, count, seed, recordCompressedFile);
      mergeTest(fs, count, seed, recordCompressedFile,
          CompressionType.RECORD, true, factor, megabytes);
      checkSort(fs, count, seed, recordCompressedFile);
      // SequenceFile.BlockCompressWriter
      writeTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK,
          codec);
      readTest(fs, count, seed, blockCompressedFile);
      sortTest(fs, count, megabytes, factor, false, blockCompressedFile);
      checkSort(fs, count, seed, blockCompressedFile);
      sortTest(fs, count, megabytes, factor, true, blockCompressedFile);
      checkSort(fs, count, seed, blockCompressedFile);
      mergeTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK,
          false, factor, megabytes);
      checkSort(fs, count, seed, blockCompressedFile);
      mergeTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK,
          true, factor, megabytes);
      checkSort(fs, count, seed, blockCompressedFile);
    } finally {
      fs.close();
    }
  }
  /**
   * Writes {@code count} seed-deterministic {@link RandomDatum} key/value pairs to
   * {@code file}, deleting any pre-existing file first.
   *
   * @param compressionType compression mode for the writer
   * @param codec codec to use; may be null for CompressionType.NONE
   */
  @SuppressWarnings("deprecation")
  private void writeTest(FileSystem fs, int count, int seed, Path file,
                         CompressionType compressionType, CompressionCodec codec)
    throws IOException {
    fs.delete(file, true);
    LOG.info("creating " + count + " records with " + compressionType +
             " compression");
    SequenceFile.Writer writer =
      SequenceFile.createWriter(fs, conf, file,
          RandomDatum.class, RandomDatum.class, compressionType, codec);
    RandomDatum.Generator generator = new RandomDatum.Generator(seed);
    for (int i = 0; i < count; i++) {
      generator.next();
      RandomDatum key = generator.getKey();
      RandomDatum value = generator.getValue();
      writer.append(key, value);
    }
    writer.close();
  }
  /**
   * Reads {@code count} records back from {@code file} and checks them against the same
   * seed-deterministic generator used by {@link #writeTest}. Alternates between the raw
   * (every 5th record) and the two non-raw read APIs to cover all reader code paths.
   */
  @SuppressWarnings("deprecation")
  private void readTest(FileSystem fs, int count, int seed, Path file)
    throws IOException {
    LOG.debug("reading " + count + " records");
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
    RandomDatum.Generator generator = new RandomDatum.Generator(seed);
    RandomDatum k = new RandomDatum();
    RandomDatum v = new RandomDatum();
    DataOutputBuffer rawKey = new DataOutputBuffer();
    SequenceFile.ValueBytes rawValue = reader.createValueBytes();
    for (int i = 0; i < count; i++) {
      generator.next();
      RandomDatum key = generator.getKey();
      RandomDatum value = generator.getValue();
      try {
        if ((i%5) == 0) {
          // Testing 'raw' apis
          rawKey.reset();
          reader.nextRaw(rawKey, rawValue);
        } else {
          // Testing 'non-raw' apis
          if ((i%2) == 0) {
            reader.next(k);
            reader.getCurrentValue(v);
          } else {
            reader.next(k, v);
          }
          // Check
          if (!k.equals(key))
            throw new RuntimeException("wrong key at " + i);
          if (!v.equals(value))
            throw new RuntimeException("wrong value at " + i);
        }
      } catch (IOException ioe) {
        // Dump the expected vs. actual state before re-throwing to aid debugging.
        LOG.info("Problem on row " + i);
        LOG.info("Expected key = " + key);
        LOG.info("Expected len = " + key.getLength());
        LOG.info("Actual key = " + k);
        LOG.info("Actual len = " + k.getLength());
        LOG.info("Expected value = " + value);
        LOG.info("Expected len = " + value.getLength());
        LOG.info("Actual value = " + v);
        LOG.info("Actual len = " + v.getLength());
        LOG.info("Key equals: " + k.equals(key));
        LOG.info("value equals: " + v.equals(value));
        throw ioe;
      }
    }
    reader.close();
  }
  /**
   * Sorts {@code file} into {@code file + ".sorted"} using a freshly configured sorter.
   *
   * @param fast if true, use the raw-comparator sorter variant (see {@link #newSorter})
   */
  private void sortTest(FileSystem fs, int count, int megabytes,
                        int factor, boolean fast, Path file)
    throws IOException {
    fs.delete(new Path(file+".sorted"), true);
    SequenceFile.Sorter sorter = newSorter(fs, fast, megabytes, factor);
    LOG.debug("sorting " + count + " records");
    sorter.sort(file, file.suffix(".sorted"));
    LOG.info("done sorting " + count + " debug");
  }
  /**
   * Verifies {@code file + ".sorted"} by regenerating the same records from {@code seed},
   * sorting them in memory with a {@link TreeMap}, and comparing record-by-record.
   */
  @SuppressWarnings("deprecation")
  private void checkSort(FileSystem fs, int count, int seed, Path file)
    throws IOException {
    LOG.info("sorting " + count + " records in memory for debug");
    RandomDatum.Generator generator = new RandomDatum.Generator(seed);
    // TreeMap gives the expected sorted order; duplicate keys collapse to the last value.
    SortedMap<RandomDatum, RandomDatum> map =
      new TreeMap<RandomDatum, RandomDatum>();
    for (int i = 0; i < count; i++) {
      generator.next();
      RandomDatum key = generator.getKey();
      RandomDatum value = generator.getValue();
      map.put(key, value);
    }
    LOG.debug("checking order of " + count + " records");
    RandomDatum k = new RandomDatum();
    RandomDatum v = new RandomDatum();
    Iterator<Map.Entry<RandomDatum, RandomDatum>> iterator =
      map.entrySet().iterator();
    SequenceFile.Reader reader =
      new SequenceFile.Reader(fs, file.suffix(".sorted"), conf);
    for (int i = 0; i < count; i++) {
      Map.Entry<RandomDatum, RandomDatum> entry = iterator.next();
      RandomDatum key = entry.getKey();
      RandomDatum value = entry.getValue();
      reader.next(k, v);
      if (!k.equals(key))
        throw new RuntimeException("wrong key at " + i);
      if (!v.equals(value))
        throw new RuntimeException("wrong value at " + i);
    }
    reader.close();
    LOG.debug("sucessfully checked " + count + " records");
  }
  /**
   * Splits {@code count} records round-robin over {@code factor} files, sorts each file
   * individually, then merges the sorted parts into {@code file + ".sorted"} (to be verified
   * afterwards by {@link #checkSort}).
   */
  @SuppressWarnings("deprecation")
  private void mergeTest(FileSystem fs, int count, int seed, Path file,
                         CompressionType compressionType,
                         boolean fast, int factor, int megabytes)
    throws IOException {
    LOG.debug("creating "+factor+" files with "+count/factor+" records");
    SequenceFile.Writer[] writers = new SequenceFile.Writer[factor];
    Path[] names = new Path[factor];
    Path[] sortedNames = new Path[factor];
    for (int i = 0; i < factor; i++) {
      names[i] = file.suffix("."+i);
      sortedNames[i] = names[i].suffix(".sorted");
      fs.delete(names[i], true);
      fs.delete(sortedNames[i], true);
      writers[i] = SequenceFile.createWriter(fs, conf, names[i],
          RandomDatum.class, RandomDatum.class, compressionType);
    }
    RandomDatum.Generator generator = new RandomDatum.Generator(seed);
    // Distribute the records round-robin across the part files.
    for (int i = 0; i < count; i++) {
      generator.next();
      RandomDatum key = generator.getKey();
      RandomDatum value = generator.getValue();
      writers[i%factor].append(key, value);
    }
    for (int i = 0; i < factor; i++)
      writers[i].close();
    for (int i = 0; i < factor; i++) {
      LOG.debug("sorting file " + i + " with " + count/factor + " records");
      newSorter(fs, fast, megabytes, factor).sort(names[i], sortedNames[i]);
    }
    LOG.info("merging " + factor + " files with " + count/factor + " debug");
    fs.delete(new Path(file+".sorted"), true);
    newSorter(fs, fast, megabytes, factor)
      .merge(sortedNames, file.suffix(".sorted"));
  }
private SequenceFile.Sorter newSorter(FileSystem fs,
boolean fast,
int megabytes, int factor) {
SequenceFile.Sorter sorter =
fast
? new SequenceFile.Sorter(fs, new RandomDatum.Comparator(),
RandomDatum.class, RandomDatum.class, conf)
: new SequenceFile.Sorter(fs, RandomDatum.class, RandomDatum.class, conf);
sorter.setMemory(megabytes * 1024*1024);
sorter.setFactor(factor);
return sorter;
}
/** Unit tests for SequenceFile metadata. */
@Test
public void testSequenceFileMetadata() throws Exception {
LOG.info("Testing SequenceFile with metadata");
int count = 1024 * 10;
CompressionCodec codec = new DefaultCodec();
Path file = new Path(GenericTestUtils.getTempPath("test.seq.metadata"));
Path sortedFile = new Path(GenericTestUtils.getTempPath(
"test.sorted.seq.metadata"));
Path recordCompressedFile = new Path(GenericTestUtils.getTempPath(
"test.rc.seq.metadata"));
Path blockCompressedFile = new Path(GenericTestUtils.getTempPath(
"test.bc.seq.metadata"));
FileSystem fs = FileSystem.getLocal(conf);
SequenceFile.Metadata theMetadata = new SequenceFile.Metadata();
theMetadata.set(new Text("name_1"), new Text("value_1"));
theMetadata.set(new Text("name_2"), new Text("value_2"));
theMetadata.set(new Text("name_3"), new Text("value_3"));
theMetadata.set(new Text("name_4"), new Text("value_4"));
int seed = new Random().nextInt();
try {
// SequenceFile.Writer
writeMetadataTest(fs, count, seed, file, CompressionType.NONE, null, theMetadata);
SequenceFile.Metadata aMetadata = readMetadata(fs, file);
if (!theMetadata.equals(aMetadata)) {
LOG.info("The original metadata:\n" + theMetadata.toString());
LOG.info("The retrieved metadata:\n" + aMetadata.toString());
throw new RuntimeException("metadata not match: " + 1);
}
// SequenceFile.RecordCompressWriter
writeMetadataTest(fs, count, seed, recordCompressedFile, CompressionType.RECORD,
codec, theMetadata);
aMetadata = readMetadata(fs, recordCompressedFile);
if (!theMetadata.equals(aMetadata)) {
LOG.info("The original metadata:\n" + theMetadata.toString());
LOG.info("The retrieved metadata:\n" + aMetadata.toString());
throw new RuntimeException("metadata not match: " + 2);
}
// SequenceFile.BlockCompressWriter
writeMetadataTest(fs, count, seed, blockCompressedFile, CompressionType.BLOCK,
codec, theMetadata);
aMetadata =readMetadata(fs, blockCompressedFile);
if (!theMetadata.equals(aMetadata)) {
LOG.info("The original metadata:\n" + theMetadata.toString());
LOG.info("The retrieved metadata:\n" + aMetadata.toString());
throw new RuntimeException("metadata not match: " + 3);
}
// SequenceFile.Sorter
sortMetadataTest(fs, file, sortedFile, theMetadata);
aMetadata = readMetadata(fs, recordCompressedFile);
if (!theMetadata.equals(aMetadata)) {
LOG.info("The original metadata:\n" + theMetadata.toString());
LOG.info("The retrieved metadata:\n" + aMetadata.toString());
throw new RuntimeException("metadata not match: " + 4);
}
} finally {
fs.close();
}
LOG.info("Successfully tested SequenceFile with metadata");
}
  /**
   * Opens {@code file}, extracts its {@link SequenceFile.Metadata} header, closes the reader
   * and returns the metadata.
   */
  @SuppressWarnings("deprecation")
  private SequenceFile.Metadata readMetadata(FileSystem fs, Path file)
    throws IOException {
    LOG.info("reading file: " + file.toString());
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, file, conf);
    SequenceFile.Metadata meta = reader.getMetadata();
    reader.close();
    return meta;
  }
  /**
   * Like {@code writeTest}, but attaches the given {@link SequenceFile.Metadata} to the
   * writer so the file header carries user metadata.
   */
  @SuppressWarnings("deprecation")
  private void writeMetadataTest(FileSystem fs, int count, int seed, Path file,
                                 CompressionType compressionType, CompressionCodec codec, SequenceFile.Metadata metadata)
    throws IOException {
    fs.delete(file, true);
    LOG.info("creating " + count + " records with metadata and with " + compressionType +
             " compression");
    SequenceFile.Writer writer =
      SequenceFile.createWriter(fs, conf, file,
          RandomDatum.class, RandomDatum.class, compressionType, codec, null, metadata);
    RandomDatum.Generator generator = new RandomDatum.Generator(seed);
    for (int i = 0; i < count; i++) {
      generator.next();
      RandomDatum key = generator.getKey();
      RandomDatum value = generator.getValue();
      writer.append(key, value);
    }
    writer.close();
  }
  /**
   * Sorts {@code unsortedFile} into {@code sortedFile} using a Sorter constructed with the
   * given metadata, which the sorter should write into the output file's header.
   */
  private void sortMetadataTest(FileSystem fs, Path unsortedFile, Path sortedFile, SequenceFile.Metadata metadata)
    throws IOException {
    fs.delete(sortedFile, true);
    LOG.info("sorting: " + unsortedFile + " to: " + sortedFile);
    final WritableComparator comparator = WritableComparator.get(RandomDatum.class);
    SequenceFile.Sorter sorter = new SequenceFile.Sorter(fs, comparator, RandomDatum.class, RandomDatum.class, conf, metadata);
    sorter.sort(new Path[] { unsortedFile }, sortedFile, false);
  }
  /**
   * Regression test for codec-pool corruption on double close: closing a reader twice must
   * not return its decompressor instances to the CodecPool twice, otherwise two concurrently
   * open readers could end up sharing the same decompressors and read garbage.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void testClose() throws IOException {
    Configuration conf = new Configuration();
    LocalFileSystem fs = FileSystem.getLocal(conf);

    // create a sequence file 1
    Path path1 = new Path(GenericTestUtils.getTempPath("test1.seq"));
    SequenceFile.Writer writer = SequenceFile.createWriter(fs, conf, path1,
        Text.class, NullWritable.class, CompressionType.BLOCK);
    writer.append(new Text("file1-1"), NullWritable.get());
    writer.append(new Text("file1-2"), NullWritable.get());
    writer.close();

    Path path2 = new Path(GenericTestUtils.getTempPath("test2.seq"));
    writer = SequenceFile.createWriter(fs, conf, path2, Text.class,
        NullWritable.class, CompressionType.BLOCK);
    writer.append(new Text("file2-1"), NullWritable.get());
    writer.append(new Text("file2-2"), NullWritable.get());
    writer.close();

    // Create a reader which uses 4 BuiltInZLibInflater instances
    SequenceFile.Reader reader = new SequenceFile.Reader(fs, path1, conf);
    // Returns the 4 BuiltInZLibInflater instances to the CodecPool
    reader.close();
    // The second close _could_ erroneously returns the same
    // 4 BuiltInZLibInflater instances to the CodecPool again
    reader.close();

    // The first reader gets 4 BuiltInZLibInflater instances from the CodecPool
    SequenceFile.Reader reader1 = new SequenceFile.Reader(fs, path1, conf);
    // read first value from reader1
    Text text = new Text();
    reader1.next(text);
    assertEquals("file1-1", text.toString());
    // The second reader _could_ get the same 4 BuiltInZLibInflater
    // instances from the CodePool as reader1
    SequenceFile.Reader reader2 = new SequenceFile.Reader(fs, path2, conf);
    // read first value from reader2
    reader2.next(text);
    assertEquals("file2-1", text.toString());
    // read second value from reader1
    reader1.next(text);
    assertEquals("file1-2", text.toString());
    // read second value from reader2 (would fail here if the pool had been
    // corrupted by the double close above)
    reader2.next(text);
    assertEquals("file2-2", text.toString());
    assertFalse(reader1.next(text));
    assertFalse(reader2.next(text));
  }
  /**
   * Test that makes sure the FileSystem passed to createWriter is the one actually used,
   * verified by asserting that the spied FileSystem's getDefaultReplication is invoked.
   * @throws Exception
   */
  @SuppressWarnings("deprecation")
  @Test
  public void testCreateUsesFsArg() throws Exception {
    FileSystem fs = FileSystem.getLocal(conf);
    FileSystem spyFs = spy(fs);
    Path p = new Path(GenericTestUtils.getTempPath("testCreateUsesFSArg.seq"));
    SequenceFile.Writer writer = SequenceFile.createWriter(
        spyFs, conf, p, NullWritable.class, NullWritable.class);
    writer.close();
    // The spy records the call, proving the passed-in fs was used.
    verify(spyFs).getDefaultReplication(p);
  }
  /**
   * FSDataInputStream wrapper that records whether {@link #close()} was called, so tests can
   * assert that streams opened during a failed reader construction are properly closed.
   */
  private static class TestFSDataInputStream extends FSDataInputStream {
    // Set to true once close() has been invoked.
    private boolean closed = false;

    private TestFSDataInputStream(InputStream in) throws IOException {
      super(in);
    }

    @Override
    public void close() throws IOException {
      closed = true;
      super.close();
    }

    /** @return true iff close() has been called on this stream. */
    public boolean isClosed() {
      return closed;
    }
  }
  /**
   * When the Reader constructor fails on an invalid (empty) file, the underlying input stream
   * it opened must still be closed. The anonymous subclass intercepts openFile so the test
   * can observe the stream's close state after the expected IOException.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void testCloseForErroneousSequenceFile()
    throws IOException {
    Configuration conf = new Configuration();
    LocalFileSystem fs = FileSystem.getLocal(conf);

    // create an empty file (which is not a valid sequence file)
    Path path = new Path(GenericTestUtils.getTempPath("broken.seq"));
    fs.create(path).close();

    // try to create SequenceFile.Reader
    final TestFSDataInputStream[] openedFile = new TestFSDataInputStream[1];

    try {
      new SequenceFile.Reader(fs, path, conf) {
        // this method is called by the SequenceFile.Reader constructor, overwritten, so we can access the opened file
        @Override
        protected FSDataInputStream openFile(FileSystem fs, Path file, int bufferSize, long length) throws IOException {
          final InputStream in = super.openFile(fs, file, bufferSize, length);
          openedFile[0] = new TestFSDataInputStream(in);
          return openedFile[0];
        }
      };
      fail("IOException expected.");
    } catch (IOException expected) {}

    assertNotNull(openedFile[0], path + " should have been opened.");
    assertTrue(openedFile[0].isClosed(), "InputStream for " + path + " should have been closed.");
  }
  /**
   * Test to makes sure zero length sequence file is handled properly while
   * initializing: opening it must fail with an EOFException rather than some
   * other error or a half-initialized reader.
   */
  @Test
  public void testInitZeroLengthSequenceFile() throws IOException {
    Configuration conf = new Configuration();
    LocalFileSystem fs = FileSystem.getLocal(conf);

    // create an empty file (which is not a valid sequence file)
    Path path = new Path(GenericTestUtils.getTempPath("zerolength.seq"));
    fs.create(path).close();

    try {
      new SequenceFile.Reader(conf, SequenceFile.Reader.file(path));
      fail("IOException expected.");
    } catch (IOException expected) {
      // Specifically the EOF on the missing header, not an arbitrary IOException.
      assertTrue(expected instanceof EOFException);
    }
  }
/**
* Test that makes sure createWriter succeeds on a file that was
* already created
* @throws IOException
*/
@SuppressWarnings("deprecation")
@Test
public void testCreateWriterOnExistingFile() throws IOException {
Configuration conf = new Configuration();
FileSystem fs = FileSystem.getLocal(conf);
Path name = new Path(new Path(GenericTestUtils.getTempPath(
"createWriterOnExistingFile")), "file");
fs.create(name);
SequenceFile.createWriter(fs, conf, name, RandomDatum.class,
RandomDatum.class, 512, (short) 1, 4096, false,
CompressionType.NONE, null, new Metadata());
}
  /**
   * createWriter with createParent=false must fail when the parent directory is missing, and
   * succeed (creating the parent) when createParent=true.
   */
  @SuppressWarnings("deprecation")
  @Test
  public void testRecursiveSeqFileCreate() throws IOException {
    FileSystem fs = FileSystem.getLocal(conf);
    Path parentDir = new Path(GenericTestUtils.getTempPath(
        "recursiveCreateDir"));
    Path name = new Path(parentDir, "file");
    boolean createParent = false;

    try {
      SequenceFile.createWriter(fs, conf, name, RandomDatum.class,
          RandomDatum.class, 512, (short) 1, 4096, createParent,
          CompressionType.NONE, null, new Metadata());
      fail("Expected an IOException due to missing parent");
    } catch (IOException ioe) {
      // Expected
    }

    try {
      createParent = true;
      SequenceFile.createWriter(fs, conf, name, RandomDatum.class,
          RandomDatum.class, 512, (short) 1, 4096, createParent,
          CompressionType.NONE, null, new Metadata());
      // should succeed, fails if exception thrown
    } finally {
      fs.deleteOnExit(parentDir);
      fs.close();
    }
  }
  /**
   * Writer creation must fail with a descriptive IOException when no serializer is registered
   * for the key or value class, and reader creation must fail likewise when the deserializer
   * is unavailable (forced here by restricting io.serializations to Avro only).
   */
  @Test
  public void testSerializationAvailability() throws IOException {
    Configuration conf = new Configuration();
    Path path = new Path(GenericTestUtils.getTempPath(
        "serializationAvailability"));
    // Check if any serializers aren't found.
    try {
      SequenceFile.createWriter(
          conf,
          SequenceFile.Writer.file(path),
          SequenceFile.Writer.keyClass(String.class),
          SequenceFile.Writer.valueClass(NullWritable.class));
      // Note: This may also fail someday if JavaSerialization
      // is activated by default.
      fail("Must throw IOException for missing serializer for the Key class");
    } catch (IOException e) {
      assertTrue(e.getMessage().startsWith(
        "Could not find a serializer for the Key class: '" +
            String.class.getName() + "'."));
    }
    try {
      SequenceFile.createWriter(
          conf,
          SequenceFile.Writer.file(path),
          SequenceFile.Writer.keyClass(NullWritable.class),
          SequenceFile.Writer.valueClass(String.class));
      // Note: This may also fail someday if JavaSerialization
      // is activated by default.
      fail("Must throw IOException for missing serializer for the Value class");
    } catch (IOException e) {
      assertTrue(e.getMessage().startsWith(
        "Could not find a serializer for the Value class: '" +
            String.class.getName() + "'."));
    }

    // Write a simple file to test deserialization failures with
    writeTest(FileSystem.get(conf), 1, 1, path, CompressionType.NONE, null);
    // Remove Writable serializations, to enforce error.
    conf.setStrings(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY,
        AvroReflectSerialization.class.getName());
    // Now check if any deserializers aren't found.
    try {
      new SequenceFile.Reader(
          conf,
          SequenceFile.Reader.file(path));
      fail("Must throw IOException for missing deserializer for the Key class");
    } catch (IOException e) {
      assertTrue(e.getMessage().startsWith(
        "Could not find a deserializer for the Key class: '" +
            RandomDatum.class.getName() + "'."));
    }
  }
  /**
   * The writer must advertise HSYNC/HFLUSH stream capabilities, and flush/hflush/hsync must
   * make appended data visible (non-zero file length) on the raw local file system.
   */
  @Test
  public void testSequenceFileWriter() throws Exception {
    Configuration conf = new Configuration();
    // This test only works with Raw File System and not Local File System
    FileSystem fs = FileSystem.getLocal(conf).getRaw();
    Path p = new Path(GenericTestUtils
        .getTempPath("testSequenceFileWriter.seq"));
    try(SequenceFile.Writer writer = SequenceFile.createWriter(
            fs, conf, p, LongWritable.class, Text.class)) {
      assertThat(writer.hasCapability
          (StreamCapabilities.HSYNC)).isEqualTo(true);
      assertThat(writer.hasCapability(
          StreamCapabilities.HFLUSH)).isEqualTo(true);
      LongWritable key = new LongWritable();
      key.set(1);
      Text value = new Text();
      value.set("somevalue");
      writer.append(key, value);
      writer.flush();
      writer.hflush();
      writer.hsync();
      // Synced data must be visible on disk before the writer is closed.
      assertThat(fs.getFileStatus(p).getLen()).isGreaterThan(0);
    }
  }
/**
 * Writes keys/values with {@link SimpleSerializable}, then registers
 * {@link AnotherSimpleSerializable} as an alias for the original class name via
 * {@link WritableName#setName} and verifies the reader resolves the stored name
 * to the new class and returns every value that was written.
 *
 * <p>Fix: the original version never closed the {@code SequenceFile.Reader};
 * both writer and reader now use try-with-resources.
 */
@Test
public void testSerializationUsingWritableNameAlias() throws IOException {
  Configuration config = new Configuration();
  config.set(CommonConfigurationKeys.IO_SERIALIZATIONS_KEY, SimpleSerializer.class.getName());
  Path path = new Path(System.getProperty("test.build.data", "."),
      "SerializationUsingWritableNameAlias");
  int max = 10;
  // write with the original serializable class
  try (SequenceFile.Writer writer = SequenceFile.createWriter(
      config,
      SequenceFile.Writer.file(path),
      SequenceFile.Writer.keyClass(SimpleSerializable.class),
      SequenceFile.Writer.valueClass(SimpleSerializable.class))) {
    SimpleSerializable val = new SimpleSerializable();
    val.setId(-1);
    for (int i = 0; i < max; i++) {
      SimpleSerializable key = new SimpleSerializable();
      key.setId(i);
      writer.append(key, val);
    }
  }
  // override name so it gets forced to the new serializable
  WritableName.setName(AnotherSimpleSerializable.class, SimpleSerializable.class.getName());
  // read and expect our new serializable, and all the correct values read
  try (SequenceFile.Reader reader = new SequenceFile.Reader(
      config,
      SequenceFile.Reader.file(path))) {
    AnotherSimpleSerializable key = new AnotherSimpleSerializable();
    int count = 0;
    while (true) {
      key = (AnotherSimpleSerializable) reader.next(key);
      if (key == null) {
        // make sure we exhausted all the ints we wrote
        assertEquals(count, max);
        break;
      }
      assertEquals(count++, key.getId());
    }
  }
}
/**
 * Minimal {@link Serializable} bean carrying a single int, used to exercise a
 * custom {@code Serialization} implementation in these tests.
 */
public static class SimpleSerializable implements Serializable {

  // Sole piece of state; written/read by SimpleSerializer as one int.
  private int value;

  public int getId() {
    return value;
  }

  public void setId(int id) {
    this.value = id;
  }
}
/**
 * Subclass of {@link SimpleSerializable} with no extra state. The alias test
 * registers this class under SimpleSerializable's class name via
 * {@code WritableName.setName(...)} so readers resolve the stored name to it.
 */
public static class AnotherSimpleSerializable extends SimpleSerializable {
}
/**
 * Test {@code Serialization} that encodes a {@link SimpleSerializable} (or any
 * subclass) as a single 4-byte int: its id.
 */
public static class SimpleSerializer implements Serialization<SimpleSerializable> {

  @Override
  public boolean accept(Class<?> c) {
    // Handles SimpleSerializable and all of its subclasses.
    return SimpleSerializable.class.isAssignableFrom(c);
  }

  @Override
  public Serializer<SimpleSerializable> getSerializer(Class<SimpleSerializable> c) {
    return new Serializer<SimpleSerializable>() {
      private DataOutputStream dataOut;

      @Override
      public void open(OutputStream out) throws IOException {
        dataOut = new DataOutputStream(out);
      }

      @Override
      public void serialize(SimpleSerializable instance) throws IOException {
        // The id is the only state; write it as one int.
        dataOut.writeInt(instance.getId());
      }

      @Override
      public void close() throws IOException {
        dataOut.close();
      }
    };
  }

  @Override
  public Deserializer<SimpleSerializable> getDeserializer(Class<SimpleSerializable> c) {
    return new Deserializer<SimpleSerializable>() {
      private DataInputStream dataIn;

      @Override
      public void open(InputStream in) throws IOException {
        dataIn = new DataInputStream(in);
      }

      @Override
      public SimpleSerializable deserialize(SimpleSerializable reuse)
          throws IOException {
        // Populates the caller-supplied instance in place and returns it.
        reuse.setId(dataIn.readInt());
        return reuse;
      }

      @Override
      public void close() throws IOException {
        dataIn.close();
      }
    };
  }
}
/**
 * Command-line driver for ad-hoc SequenceFile experiments (for debugging and
 * testing). Flags: {@code -count N}, {@code -megabytes M}, {@code -factor F},
 * {@code -seed #}, {@code -rwonly}, {@code -nocreate}, {@code -check},
 * {@code -fast}, {@code -merge}, {@code -compressType <NONE|RECORD|BLOCK>},
 * {@code -codec <class>}; the final non-flag argument is the required file.
 *
 * <p>Fix: the file operand is now validated before use — previously a missing
 * file argument caused an NPE at {@code file.getFileSystem(...)} instead of
 * printing the usage message.
 */
public static void main(String[] args) throws Exception {
  int count = 1024 * 1024;
  int megabytes = 1;
  int factor = 10;
  boolean create = true;
  boolean rwonly = false;
  boolean check = false;
  boolean fast = false;
  boolean merge = false;
  String compressType = "NONE";
  String compressionCodec = "org.apache.hadoop.io.compress.DefaultCodec";
  Path file = null;
  int seed = new Random().nextInt();
  String usage = "Usage: testsequencefile " +
      "[-count N] " +
      "[-seed #] [-check] [-compressType <NONE|RECORD|BLOCK>] " +
      "-codec <compressionCodec> " +
      "[[-rwonly] | {[-megabytes M] [-factor F] [-nocreate] [-fast] [-merge]}] " +
      " file";
  if (args.length == 0) {
    System.err.println(usage);
    System.exit(-1);
  }
  FileSystem fs = null;
  try {
    for (int i = 0; i < args.length; ++i) { // parse command line
      if (args[i] == null) {
        continue;
      } else if (args[i].equals("-count")) {
        count = Integer.parseInt(args[++i]);
      } else if (args[i].equals("-megabytes")) {
        megabytes = Integer.parseInt(args[++i]);
      } else if (args[i].equals("-factor")) {
        factor = Integer.parseInt(args[++i]);
      } else if (args[i].equals("-seed")) {
        seed = Integer.parseInt(args[++i]);
      } else if (args[i].equals("-rwonly")) {
        rwonly = true;
      } else if (args[i].equals("-nocreate")) {
        create = false;
      } else if (args[i].equals("-check")) {
        check = true;
      } else if (args[i].equals("-fast")) {
        fast = true;
      } else if (args[i].equals("-merge")) {
        merge = true;
      } else if (args[i].equals("-compressType")) {
        compressType = args[++i];
      } else if (args[i].equals("-codec")) {
        compressionCodec = args[++i];
      } else {
        // file is required parameter
        file = new Path(args[i]);
      }
    }
    // The file operand is mandatory; fail with the usage message instead of
    // an NPE on file.getFileSystem() below.
    if (file == null) {
      System.err.println(usage);
      System.exit(-1);
    }
    TestSequenceFile test = new TestSequenceFile();
    fs = file.getFileSystem(test.conf);
    LOG.info("count = " + count);
    LOG.info("megabytes = " + megabytes);
    LOG.info("factor = " + factor);
    LOG.info("create = " + create);
    LOG.info("seed = " + seed);
    LOG.info("rwonly = " + rwonly);
    LOG.info("check = " + check);
    LOG.info("fast = " + fast);
    LOG.info("merge = " + merge);
    LOG.info("compressType = " + compressType);
    LOG.info("compressionCodec = " + compressionCodec);
    LOG.info("file = " + file);
    // -rwonly is incompatible with -nocreate, -merge and -fast.
    if (rwonly && (!create || merge || fast)) {
      System.err.println(usage);
      System.exit(-1);
    }
    CompressionType compressionType =
        CompressionType.valueOf(compressType);
    CompressionCodec codec = (CompressionCodec)ReflectionUtils.newInstance(
        test.conf.getClassByName(compressionCodec),
        test.conf);
    if (rwonly || (create && !merge)) {
      test.writeTest(fs, count, seed, file, compressionType, codec);
      test.readTest(fs, count, seed, file);
    }
    if (!rwonly) {
      if (merge) {
        test.mergeTest(fs, count, seed, file, compressionType,
            fast, factor, megabytes);
      } else {
        test.sortTest(fs, count, megabytes, factor, fast, file);
      }
    }
    if (check) {
      test.checkSort(fs, count, seed, file);
    }
  } finally {
    // Always release the FileSystem handle, even on parse/test failure.
    if (fs != null) {
      fs.close();
    }
  }
}
}
|
googleapis/google-cloud-java | 36,870 | java-dialogflow-cx/google-cloud-dialogflow-cx/src/test/java/com/google/cloud/dialogflow/cx/v3/FlowsClientTest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3;
import static com.google.cloud.dialogflow.cx.v3.FlowsClient.ListFlowsPagedResponse;
import static com.google.cloud.dialogflow.cx.v3.FlowsClient.ListLocationsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Timestamp;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class FlowsClientTest {
// NOTE(review): this class is @Generated by gapic-generator-java; prefer
// regenerating over hand-editing.
// In-process gRPC mocks shared by every test in this class.
private static MockFlows mockFlows;
private static MockLocations mockLocations;
private static MockServiceHelper mockServiceHelper;
// Per-test channel provider and client, rebuilt in setUp() against the mocks.
private LocalChannelProvider channelProvider;
private FlowsClient client;
// Starts one in-process mock gRPC server (Flows + Locations) for the class.
@BeforeClass
public static void startStaticServer() {
  mockFlows = new MockFlows();
  mockLocations = new MockLocations();
  mockServiceHelper =
      new MockServiceHelper(
          UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockFlows, mockLocations));
  mockServiceHelper.start();
}

@AfterClass
public static void stopServer() {
  mockServiceHelper.stop();
}

// Resets the mocks and builds a fresh credential-less client per test.
@Before
public void setUp() throws IOException {
  mockServiceHelper.reset();
  channelProvider = mockServiceHelper.createChannelProvider();
  FlowsSettings settings =
      FlowsSettings.newBuilder()
          .setTransportChannelProvider(channelProvider)
          .setCredentialsProvider(NoCredentialsProvider.create())
          .build();
  client = FlowsClient.create(settings);
}

@After
public void tearDown() throws Exception {
  client.close();
}
// --- createFlow: happy path + INVALID_ARGUMENT propagation, for both the
// AgentName and plain-String parent overloads (generated code). ---
@Test
public void createFlowTest() throws Exception {
  Flow expectedResponse =
      Flow.newBuilder()
          .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .addAllTransitionRoutes(new ArrayList<TransitionRoute>())
          .addAllEventHandlers(new ArrayList<EventHandler>())
          .addAllTransitionRouteGroups(new ArrayList<String>())
          .setNluSettings(NluSettings.newBuilder().build())
          .setAdvancedSettings(AdvancedSettings.newBuilder().build())
          .setKnowledgeConnectorSettings(KnowledgeConnectorSettings.newBuilder().build())
          .setMultiLanguageSettings(Flow.MultiLanguageSettings.newBuilder().build())
          .setLocked(true)
          .build();
  mockFlows.addResponse(expectedResponse);
  AgentName parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]");
  Flow flow = Flow.newBuilder().build();
  Flow actualResponse = client.createFlow(parent, flow);
  Assert.assertEquals(expectedResponse, actualResponse);
  // Verify the request the client actually sent to the mock.
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  CreateFlowRequest actualRequest = ((CreateFlowRequest) actualRequests.get(0));
  Assert.assertEquals(parent.toString(), actualRequest.getParent());
  Assert.assertEquals(flow, actualRequest.getFlow());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void createFlowExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    AgentName parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]");
    Flow flow = Flow.newBuilder().build();
    client.createFlow(parent, flow);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

@Test
public void createFlowTest2() throws Exception {
  Flow expectedResponse =
      Flow.newBuilder()
          .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .addAllTransitionRoutes(new ArrayList<TransitionRoute>())
          .addAllEventHandlers(new ArrayList<EventHandler>())
          .addAllTransitionRouteGroups(new ArrayList<String>())
          .setNluSettings(NluSettings.newBuilder().build())
          .setAdvancedSettings(AdvancedSettings.newBuilder().build())
          .setKnowledgeConnectorSettings(KnowledgeConnectorSettings.newBuilder().build())
          .setMultiLanguageSettings(Flow.MultiLanguageSettings.newBuilder().build())
          .setLocked(true)
          .build();
  mockFlows.addResponse(expectedResponse);
  String parent = "parent-995424086";
  Flow flow = Flow.newBuilder().build();
  Flow actualResponse = client.createFlow(parent, flow);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  CreateFlowRequest actualRequest = ((CreateFlowRequest) actualRequests.get(0));
  Assert.assertEquals(parent, actualRequest.getParent());
  Assert.assertEquals(flow, actualRequest.getFlow());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void createFlowExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    String parent = "parent-995424086";
    Flow flow = Flow.newBuilder().build();
    client.createFlow(parent, flow);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
// --- deleteFlow: happy path + error propagation for both overloads
// (generated code). ---
@Test
public void deleteFlowTest() throws Exception {
  Empty expectedResponse = Empty.newBuilder().build();
  mockFlows.addResponse(expectedResponse);
  FlowName name = FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
  client.deleteFlow(name);
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  DeleteFlowRequest actualRequest = ((DeleteFlowRequest) actualRequests.get(0));
  Assert.assertEquals(name.toString(), actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void deleteFlowExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    FlowName name = FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
    client.deleteFlow(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

@Test
public void deleteFlowTest2() throws Exception {
  Empty expectedResponse = Empty.newBuilder().build();
  mockFlows.addResponse(expectedResponse);
  String name = "name3373707";
  client.deleteFlow(name);
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  DeleteFlowRequest actualRequest = ((DeleteFlowRequest) actualRequests.get(0));
  Assert.assertEquals(name, actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void deleteFlowExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    String name = "name3373707";
    client.deleteFlow(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
// --- listFlows: single-page pagination + error propagation for both
// overloads (generated code). ---
@Test
public void listFlowsTest() throws Exception {
  Flow responsesElement = Flow.newBuilder().build();
  ListFlowsResponse expectedResponse =
      ListFlowsResponse.newBuilder()
          .setNextPageToken("")
          .addAllFlows(Arrays.asList(responsesElement))
          .build();
  mockFlows.addResponse(expectedResponse);
  AgentName parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]");
  ListFlowsPagedResponse pagedListResponse = client.listFlows(parent);
  // Empty next-page token above means iterateAll() yields exactly one page.
  List<Flow> resources = Lists.newArrayList(pagedListResponse.iterateAll());
  Assert.assertEquals(1, resources.size());
  Assert.assertEquals(expectedResponse.getFlowsList().get(0), resources.get(0));
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  ListFlowsRequest actualRequest = ((ListFlowsRequest) actualRequests.get(0));
  Assert.assertEquals(parent.toString(), actualRequest.getParent());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void listFlowsExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    AgentName parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]");
    client.listFlows(parent);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

@Test
public void listFlowsTest2() throws Exception {
  Flow responsesElement = Flow.newBuilder().build();
  ListFlowsResponse expectedResponse =
      ListFlowsResponse.newBuilder()
          .setNextPageToken("")
          .addAllFlows(Arrays.asList(responsesElement))
          .build();
  mockFlows.addResponse(expectedResponse);
  String parent = "parent-995424086";
  ListFlowsPagedResponse pagedListResponse = client.listFlows(parent);
  List<Flow> resources = Lists.newArrayList(pagedListResponse.iterateAll());
  Assert.assertEquals(1, resources.size());
  Assert.assertEquals(expectedResponse.getFlowsList().get(0), resources.get(0));
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  ListFlowsRequest actualRequest = ((ListFlowsRequest) actualRequests.get(0));
  Assert.assertEquals(parent, actualRequest.getParent());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void listFlowsExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    String parent = "parent-995424086";
    client.listFlows(parent);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
// --- getFlow: happy path + error propagation for both overloads
// (generated code). ---
@Test
public void getFlowTest() throws Exception {
  Flow expectedResponse =
      Flow.newBuilder()
          .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .addAllTransitionRoutes(new ArrayList<TransitionRoute>())
          .addAllEventHandlers(new ArrayList<EventHandler>())
          .addAllTransitionRouteGroups(new ArrayList<String>())
          .setNluSettings(NluSettings.newBuilder().build())
          .setAdvancedSettings(AdvancedSettings.newBuilder().build())
          .setKnowledgeConnectorSettings(KnowledgeConnectorSettings.newBuilder().build())
          .setMultiLanguageSettings(Flow.MultiLanguageSettings.newBuilder().build())
          .setLocked(true)
          .build();
  mockFlows.addResponse(expectedResponse);
  FlowName name = FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
  Flow actualResponse = client.getFlow(name);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  GetFlowRequest actualRequest = ((GetFlowRequest) actualRequests.get(0));
  Assert.assertEquals(name.toString(), actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void getFlowExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    FlowName name = FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
    client.getFlow(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

@Test
public void getFlowTest2() throws Exception {
  Flow expectedResponse =
      Flow.newBuilder()
          .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .addAllTransitionRoutes(new ArrayList<TransitionRoute>())
          .addAllEventHandlers(new ArrayList<EventHandler>())
          .addAllTransitionRouteGroups(new ArrayList<String>())
          .setNluSettings(NluSettings.newBuilder().build())
          .setAdvancedSettings(AdvancedSettings.newBuilder().build())
          .setKnowledgeConnectorSettings(KnowledgeConnectorSettings.newBuilder().build())
          .setMultiLanguageSettings(Flow.MultiLanguageSettings.newBuilder().build())
          .setLocked(true)
          .build();
  mockFlows.addResponse(expectedResponse);
  String name = "name3373707";
  Flow actualResponse = client.getFlow(name);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  GetFlowRequest actualRequest = ((GetFlowRequest) actualRequests.get(0));
  Assert.assertEquals(name, actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void getFlowExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    String name = "name3373707";
    client.getFlow(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
// --- updateFlow: happy path + error propagation (generated code). ---
@Test
public void updateFlowTest() throws Exception {
  Flow expectedResponse =
      Flow.newBuilder()
          .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
          .setDisplayName("displayName1714148973")
          .setDescription("description-1724546052")
          .addAllTransitionRoutes(new ArrayList<TransitionRoute>())
          .addAllEventHandlers(new ArrayList<EventHandler>())
          .addAllTransitionRouteGroups(new ArrayList<String>())
          .setNluSettings(NluSettings.newBuilder().build())
          .setAdvancedSettings(AdvancedSettings.newBuilder().build())
          .setKnowledgeConnectorSettings(KnowledgeConnectorSettings.newBuilder().build())
          .setMultiLanguageSettings(Flow.MultiLanguageSettings.newBuilder().build())
          .setLocked(true)
          .build();
  mockFlows.addResponse(expectedResponse);
  Flow flow = Flow.newBuilder().build();
  FieldMask updateMask = FieldMask.newBuilder().build();
  Flow actualResponse = client.updateFlow(flow, updateMask);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  UpdateFlowRequest actualRequest = ((UpdateFlowRequest) actualRequests.get(0));
  Assert.assertEquals(flow, actualRequest.getFlow());
  Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void updateFlowExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    Flow flow = Flow.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    client.updateFlow(flow, updateMask);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
// --- trainFlow: long-running operation happy path + error surfaced through
// the future as ExecutionException, for both overloads (generated code). ---
@Test
public void trainFlowTest() throws Exception {
  Empty expectedResponse = Empty.newBuilder().build();
  // Pre-completed LRO so the client's poller returns immediately.
  Operation resultOperation =
      Operation.newBuilder()
          .setName("trainFlowTest")
          .setDone(true)
          .setResponse(Any.pack(expectedResponse))
          .build();
  mockFlows.addResponse(resultOperation);
  FlowName name = FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
  client.trainFlowAsync(name).get();
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  TrainFlowRequest actualRequest = ((TrainFlowRequest) actualRequests.get(0));
  Assert.assertEquals(name.toString(), actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void trainFlowExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    FlowName name = FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
    client.trainFlowAsync(name).get();
    Assert.fail("No exception raised");
  } catch (ExecutionException e) {
    Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
    InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
    Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
  }
}

@Test
public void trainFlowTest2() throws Exception {
  Empty expectedResponse = Empty.newBuilder().build();
  Operation resultOperation =
      Operation.newBuilder()
          .setName("trainFlowTest")
          .setDone(true)
          .setResponse(Any.pack(expectedResponse))
          .build();
  mockFlows.addResponse(resultOperation);
  String name = "name3373707";
  client.trainFlowAsync(name).get();
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  TrainFlowRequest actualRequest = ((TrainFlowRequest) actualRequests.get(0));
  Assert.assertEquals(name, actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void trainFlowExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    String name = "name3373707";
    client.trainFlowAsync(name).get();
    Assert.fail("No exception raised");
  } catch (ExecutionException e) {
    Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
    InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
    Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
  }
}
// --- validateFlow: request-object form, happy path + error propagation
// (generated code). ---
@Test
public void validateFlowTest() throws Exception {
  FlowValidationResult expectedResponse =
      FlowValidationResult.newBuilder()
          .setName(
              FlowValidationResultName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]")
                  .toString())
          .addAllValidationMessages(new ArrayList<ValidationMessage>())
          .setUpdateTime(Timestamp.newBuilder().build())
          .build();
  mockFlows.addResponse(expectedResponse);
  ValidateFlowRequest request =
      ValidateFlowRequest.newBuilder()
          .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
          .setLanguageCode("languageCode-2092349083")
          .build();
  FlowValidationResult actualResponse = client.validateFlow(request);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  ValidateFlowRequest actualRequest = ((ValidateFlowRequest) actualRequests.get(0));
  Assert.assertEquals(request.getName(), actualRequest.getName());
  Assert.assertEquals(request.getLanguageCode(), actualRequest.getLanguageCode());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void validateFlowExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    ValidateFlowRequest request =
        ValidateFlowRequest.newBuilder()
            .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
            .setLanguageCode("languageCode-2092349083")
            .build();
    client.validateFlow(request);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
// --- getFlowValidationResult: happy path + error propagation for both
// overloads (generated code). ---
@Test
public void getFlowValidationResultTest() throws Exception {
  FlowValidationResult expectedResponse =
      FlowValidationResult.newBuilder()
          .setName(
              FlowValidationResultName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]")
                  .toString())
          .addAllValidationMessages(new ArrayList<ValidationMessage>())
          .setUpdateTime(Timestamp.newBuilder().build())
          .build();
  mockFlows.addResponse(expectedResponse);
  FlowValidationResultName name =
      FlowValidationResultName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
  FlowValidationResult actualResponse = client.getFlowValidationResult(name);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  GetFlowValidationResultRequest actualRequest =
      ((GetFlowValidationResultRequest) actualRequests.get(0));
  Assert.assertEquals(name.toString(), actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void getFlowValidationResultExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    FlowValidationResultName name =
        FlowValidationResultName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
    client.getFlowValidationResult(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}

@Test
public void getFlowValidationResultTest2() throws Exception {
  FlowValidationResult expectedResponse =
      FlowValidationResult.newBuilder()
          .setName(
              FlowValidationResultName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]")
                  .toString())
          .addAllValidationMessages(new ArrayList<ValidationMessage>())
          .setUpdateTime(Timestamp.newBuilder().build())
          .build();
  mockFlows.addResponse(expectedResponse);
  String name = "name3373707";
  FlowValidationResult actualResponse = client.getFlowValidationResult(name);
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  GetFlowValidationResultRequest actualRequest =
      ((GetFlowValidationResultRequest) actualRequests.get(0));
  Assert.assertEquals(name, actualRequest.getName());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void getFlowValidationResultExceptionTest2() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    String name = "name3373707";
    client.getFlowValidationResult(name);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
// --- importFlow: long-running operation happy path + error surfaced through
// the future (generated code). ---
@Test
public void importFlowTest() throws Exception {
  ImportFlowResponse expectedResponse =
      ImportFlowResponse.newBuilder()
          .setFlow(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
          .build();
  // Pre-completed LRO so the client's poller returns immediately.
  Operation resultOperation =
      Operation.newBuilder()
          .setName("importFlowTest")
          .setDone(true)
          .setResponse(Any.pack(expectedResponse))
          .build();
  mockFlows.addResponse(resultOperation);
  ImportFlowRequest request =
      ImportFlowRequest.newBuilder()
          .setParent(AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]").toString())
          .setFlowImportStrategy(FlowImportStrategy.newBuilder().build())
          .build();
  ImportFlowResponse actualResponse = client.importFlowAsync(request).get();
  Assert.assertEquals(expectedResponse, actualResponse);
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  ImportFlowRequest actualRequest = ((ImportFlowRequest) actualRequests.get(0));
  Assert.assertEquals(request.getParent(), actualRequest.getParent());
  Assert.assertEquals(request.getFlowUri(), actualRequest.getFlowUri());
  Assert.assertEquals(request.getFlowContent(), actualRequest.getFlowContent());
  Assert.assertEquals(request.getImportOption(), actualRequest.getImportOption());
  Assert.assertEquals(request.getFlowImportStrategy(), actualRequest.getFlowImportStrategy());
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}

@Test
public void importFlowExceptionTest() throws Exception {
  StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
  mockFlows.addException(exception);
  try {
    ImportFlowRequest request =
        ImportFlowRequest.newBuilder()
            .setParent(AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]").toString())
            .setFlowImportStrategy(FlowImportStrategy.newBuilder().build())
            .build();
    client.importFlowAsync(request).get();
    Assert.fail("No exception raised");
  } catch (ExecutionException e) {
    Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
    InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
    Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
  }
}
@Test
public void exportFlowTest() throws Exception {
  // Arrange: an empty ExportFlowResponse wrapped in a completed Operation so
  // that exportFlowAsync(...) resolves immediately.
  ExportFlowResponse expectedResponse = ExportFlowResponse.newBuilder().build();
  Operation resultOperation =
      Operation.newBuilder()
          .setName("exportFlowTest")
          .setDone(true)
          .setResponse(Any.pack(expectedResponse))
          .build();
  mockFlows.addResponse(resultOperation);
  ExportFlowRequest request =
      ExportFlowRequest.newBuilder()
          .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
          .setFlowUri("flowUri-765815458")
          .setIncludeReferencedFlows(true)
          .build();
  // Act: invoke the RPC and block on the long-running operation result.
  ExportFlowResponse actualResponse = client.exportFlowAsync(request).get();
  // Assert: stubbed response returned unchanged.
  Assert.assertEquals(expectedResponse, actualResponse);
  // Assert: exactly one request reached the mock, with all fields forwarded.
  List<AbstractMessage> actualRequests = mockFlows.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  ExportFlowRequest actualRequest = ((ExportFlowRequest) actualRequests.get(0));
  Assert.assertEquals(request.getName(), actualRequest.getName());
  Assert.assertEquals(request.getFlowUri(), actualRequest.getFlowUri());
  Assert.assertEquals(
      request.getIncludeReferencedFlows(), actualRequest.getIncludeReferencedFlows());
  // Assert: the standard API client header was sent on the channel.
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void exportFlowExceptionTest() throws Exception {
  // Queue an INVALID_ARGUMENT failure on the mocked Flows service.
  mockFlows.addException(new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT));

  ExportFlowRequest request =
      ExportFlowRequest.newBuilder()
          .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
          .setFlowUri("flowUri-765815458")
          .setIncludeReferencedFlows(true)
          .build();
  try {
    client.exportFlowAsync(request).get();
    Assert.fail("No exception raised");
  } catch (ExecutionException e) {
    // The async API wraps the service error; unwrap and verify the status code.
    Throwable cause = e.getCause();
    Assert.assertEquals(InvalidArgumentException.class, cause.getClass());
    Assert.assertEquals(
        StatusCode.Code.INVALID_ARGUMENT,
        ((InvalidArgumentException) cause).getStatusCode().getCode());
  }
}
@Test
public void listLocationsTest() throws Exception {
  // Arrange: a single-element page with an empty next-page token (last page).
  Location responsesElement = Location.newBuilder().build();
  ListLocationsResponse expectedResponse =
      ListLocationsResponse.newBuilder()
          .setNextPageToken("")
          .addAllLocations(Arrays.asList(responsesElement))
          .build();
  mockLocations.addResponse(expectedResponse);
  ListLocationsRequest request =
      ListLocationsRequest.newBuilder()
          .setName("name3373707")
          .setFilter("filter-1274492040")
          .setPageSize(883849137)
          .setPageToken("pageToken873572522")
          .build();
  // Act: list locations and drain the paged response.
  ListLocationsPagedResponse pagedListResponse = client.listLocations(request);
  List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());
  // Assert: exactly the one stubbed location comes back.
  Assert.assertEquals(1, resources.size());
  Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));
  // Assert: one request reached the mock, with all paging fields forwarded.
  List<AbstractMessage> actualRequests = mockLocations.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));
  Assert.assertEquals(request.getName(), actualRequest.getName());
  Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
  Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
  Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
  // Assert: the standard API client header was sent on the channel.
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void listLocationsExceptionTest() throws Exception {
  // The mocked Locations service will fail the next call with INVALID_ARGUMENT.
  mockLocations.addException(new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT));

  ListLocationsRequest request =
      ListLocationsRequest.newBuilder()
          .setName("name3373707")
          .setFilter("filter-1274492040")
          .setPageSize(883849137)
          .setPageToken("pageToken873572522")
          .build();
  try {
    client.listLocations(request);
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected: INVALID_ARGUMENT maps to InvalidArgumentException.
  }
}
@Test
public void getLocationTest() throws Exception {
  // Arrange: a fully-populated Location stub for the mock service to return.
  Location expectedResponse =
      Location.newBuilder()
          .setName("name3373707")
          .setLocationId("locationId1541836720")
          .setDisplayName("displayName1714148973")
          .putAllLabels(new HashMap<String, String>())
          .setMetadata(Any.newBuilder().build())
          .build();
  mockLocations.addResponse(expectedResponse);
  GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
  // Act: fetch the location through the generated client.
  Location actualResponse = client.getLocation(request);
  // Assert: the stubbed response is returned unchanged.
  Assert.assertEquals(expectedResponse, actualResponse);
  // Assert: one request reached the mock with the name forwarded verbatim.
  List<AbstractMessage> actualRequests = mockLocations.getRequests();
  Assert.assertEquals(1, actualRequests.size());
  GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));
  Assert.assertEquals(request.getName(), actualRequest.getName());
  // Assert: the standard API client header was sent on the channel.
  Assert.assertTrue(
      channelProvider.isHeaderSent(
          ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
          GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void getLocationExceptionTest() throws Exception {
  // Configure the mocked Locations service to reject the next call.
  mockLocations.addException(new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT));
  try {
    // Inline the request; only the name field matters for this negative test.
    client.getLocation(GetLocationRequest.newBuilder().setName("name3373707").build());
    Assert.fail("No exception raised");
  } catch (InvalidArgumentException e) {
    // Expected exception.
  }
}
}
|
googleapis/google-cloud-java | 36,959 | java-dataform/proto-google-cloud-dataform-v1/src/main/java/com/google/cloud/dataform/v1/FetchRepositoryHistoryResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dataform/v1/dataform.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dataform.v1;
/**
*
*
* <pre>
* `FetchRepositoryHistory` response message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1.FetchRepositoryHistoryResponse}
*/
public final class FetchRepositoryHistoryResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dataform.v1.FetchRepositoryHistoryResponse)
FetchRepositoryHistoryResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use FetchRepositoryHistoryResponse.newBuilder() to construct.
// Builder-based constructor; instances are created via newBuilder().build().
private FetchRepositoryHistoryResponse(
    com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor used for the default instance: empty commit list,
// empty page token (proto3 defaults).
private FetchRepositoryHistoryResponse() {
  commits_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}

// Invoked reflectively by the protobuf runtime to allocate fresh instances.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new FetchRepositoryHistoryResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_FetchRepositoryHistoryResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_FetchRepositoryHistoryResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse.class,
com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse.Builder.class);
}
public static final int COMMITS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.dataform.v1.CommitLogEntry> commits_;
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.dataform.v1.CommitLogEntry> getCommitsList() {
return commits_;
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.dataform.v1.CommitLogEntryOrBuilder>
getCommitsOrBuilderList() {
return commits_;
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
@java.lang.Override
public int getCommitsCount() {
return commits_.size();
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dataform.v1.CommitLogEntry getCommits(int index) {
return commits_.get(index);
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dataform.v1.CommitLogEntryOrBuilder getCommitsOrBuilder(int index) {
return commits_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
@java.lang.Override
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
}
}
/**
*
*
* <pre>
* A token, which can be sent as `page_token` to retrieve the next page.
* If this field is omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This proto3 message has no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1: each commit log entry as a length-delimited sub-message.
  for (int i = 0; i < commits_.size(); i++) {
    output.writeMessage(1, commits_.get(i));
  }
  // Field 2: next_page_token, omitted when empty (proto3 default value).
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
  }
  // Round-trip any unknown fields captured during parsing.
  getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
  // Return the memoized size when it has already been computed (-1 = unset).
  int size = memoizedSize;
  if (size != -1) return size;
  size = 0;
  // Field 1: sum of tag + length-delimited sizes of each commit entry.
  for (int i = 0; i < commits_.size(); i++) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, commits_.get(i));
  }
  // Field 2: next_page_token contributes only when non-empty.
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  // Non-FetchRepositoryHistoryResponse objects fall back to the superclass check.
  if (!(obj instanceof com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse)) {
    return super.equals(obj);
  }
  com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse other =
      (com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse) obj;
  // Field-by-field comparison, including unknown fields, per the protobuf
  // equals contract (kept consistent with hashCode below).
  if (!getCommitsList().equals(other.getCommitsList())) return false;
  if (!getNextPageToken().equals(other.getNextPageToken())) return false;
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}
@java.lang.Override
public int hashCode() {
  // Memoized; 0 doubles as the "not yet computed" sentinel.
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  // Repeated field contributes only when non-empty, mirroring serialization.
  if (getCommitsCount() > 0) {
    hash = (37 * hash) + COMMITS_FIELD_NUMBER;
    hash = (53 * hash) + getCommitsList().hashCode();
  }
  hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
  hash = (53 * hash) + getNextPageToken().hashCode();
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* `FetchRepositoryHistory` response message.
* </pre>
*
* Protobuf type {@code google.cloud.dataform.v1.FetchRepositoryHistoryResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dataform.v1.FetchRepositoryHistoryResponse)
com.google.cloud.dataform.v1.FetchRepositoryHistoryResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_FetchRepositoryHistoryResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_FetchRepositoryHistoryResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse.class,
com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse.Builder.class);
}
// Construct using com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (commitsBuilder_ == null) {
commits_ = java.util.Collections.emptyList();
} else {
commits_ = null;
commitsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dataform.v1.DataformProto
.internal_static_google_cloud_dataform_v1_FetchRepositoryHistoryResponse_descriptor;
}
@java.lang.Override
public com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse getDefaultInstanceForType() {
return com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse build() {
com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse buildPartial() {
com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse result =
new com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers the repeated 'commits' field into the result message. When no
// field builder exists, the backing list is frozen (made unmodifiable) and
// handed over directly; otherwise the builder produces the final list.
private void buildPartialRepeatedFields(
    com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse result) {
  if (commitsBuilder_ == null) {
    if (((bitField0_ & 0x00000001) != 0)) {
      commits_ = java.util.Collections.unmodifiableList(commits_);
      bitField0_ = (bitField0_ & ~0x00000001);
    }
    result.commits_ = commits_;
  } else {
    result.commits_ = commitsBuilder_.build();
  }
}

// Copies scalar fields into the result; bit 0x2 tracks whether
// next_page_token was explicitly set on this builder.
private void buildPartial0(com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse result) {
  int from_bitField0_ = bitField0_;
  if (((from_bitField0_ & 0x00000002) != 0)) {
    result.nextPageToken_ = nextPageToken_;
  }
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse) {
return mergeFrom((com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Merges another FetchRepositoryHistoryResponse into this builder:
// commits are appended, and a non-empty next_page_token overwrites ours.
public Builder mergeFrom(com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse other) {
  if (other == com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse.getDefaultInstance())
    return this;
  if (commitsBuilder_ == null) {
    // List-backed mode: adopt the other list wholesale when ours is empty,
    // otherwise copy-on-write and append.
    if (!other.commits_.isEmpty()) {
      if (commits_.isEmpty()) {
        commits_ = other.commits_;
        bitField0_ = (bitField0_ & ~0x00000001);
      } else {
        ensureCommitsIsMutable();
        commits_.addAll(other.commits_);
      }
      onChanged();
    }
  } else {
    // Builder-backed mode: if our builder is empty, discard it and adopt the
    // other list (re-creating the builder only when the runtime requires it);
    // otherwise append through the builder.
    if (!other.commits_.isEmpty()) {
      if (commitsBuilder_.isEmpty()) {
        commitsBuilder_.dispose();
        commitsBuilder_ = null;
        commits_ = other.commits_;
        bitField0_ = (bitField0_ & ~0x00000001);
        commitsBuilder_ =
            com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                ? getCommitsFieldBuilder()
                : null;
      } else {
        commitsBuilder_.addAllMessages(other.commits_);
      }
    }
  }
  if (!other.getNextPageToken().isEmpty()) {
    nextPageToken_ = other.nextPageToken_;
    bitField0_ |= 0x00000002;
    onChanged();
  }
  this.mergeUnknownFields(other.getUnknownFields());
  onChanged();
  return this;
}
@java.lang.Override
public final boolean isInitialized() {
  // No required fields; a builder is always in a buildable state.
  return true;
}

// Parses wire-format bytes into this builder, field tag by field tag.
@java.lang.Override
public Builder mergeFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  if (extensionRegistry == null) {
    throw new java.lang.NullPointerException();
  }
  try {
    boolean done = false;
    while (!done) {
      int tag = input.readTag();
      switch (tag) {
        case 0:
          // Tag 0 marks end of input.
          done = true;
          break;
        case 10:
          // Tag 10 = field 1 (commits), wire type 2: length-delimited message.
          {
            com.google.cloud.dataform.v1.CommitLogEntry m =
                input.readMessage(
                    com.google.cloud.dataform.v1.CommitLogEntry.parser(), extensionRegistry);
            if (commitsBuilder_ == null) {
              ensureCommitsIsMutable();
              commits_.add(m);
            } else {
              commitsBuilder_.addMessage(m);
            }
            break;
          } // case 10
        case 18:
          // Tag 18 = field 2 (next_page_token), wire type 2: UTF-8 string.
          {
            nextPageToken_ = input.readStringRequireUtf8();
            bitField0_ |= 0x00000002;
            break;
          } // case 18
        default:
          // Unknown fields are preserved; an end-group tag terminates parsing.
          {
            if (!super.parseUnknownField(input, extensionRegistry, tag)) {
              done = true; // was an endgroup tag
            }
            break;
          } // default:
      } // switch (tag)
    } // while (!done)
  } catch (com.google.protobuf.InvalidProtocolBufferException e) {
    throw e.unwrapIOException();
  } finally {
    // Notify parent builders even on failure, since fields may have changed.
    onChanged();
  } // finally
  return this;
}
// Presence bits: 0x1 = commits list is mutable/owned, 0x2 = next_page_token set.
private int bitField0_;

// Backing list for field 1 while no RepeatedFieldBuilderV3 is attached.
private java.util.List<com.google.cloud.dataform.v1.CommitLogEntry> commits_ =
    java.util.Collections.emptyList();

// Copy-on-write guard: clone the (possibly shared/immutable) list into an
// ArrayList before the first mutation, then mark it owned via bit 0x1.
private void ensureCommitsIsMutable() {
  if (!((bitField0_ & 0x00000001) != 0)) {
    commits_ = new java.util.ArrayList<com.google.cloud.dataform.v1.CommitLogEntry>(commits_);
    bitField0_ |= 0x00000001;
  }
}

// Lazily-created field builder for 'commits'; null until first builder access.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.dataform.v1.CommitLogEntry,
        com.google.cloud.dataform.v1.CommitLogEntry.Builder,
        com.google.cloud.dataform.v1.CommitLogEntryOrBuilder>
    commitsBuilder_;
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public java.util.List<com.google.cloud.dataform.v1.CommitLogEntry> getCommitsList() {
if (commitsBuilder_ == null) {
return java.util.Collections.unmodifiableList(commits_);
} else {
return commitsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public int getCommitsCount() {
if (commitsBuilder_ == null) {
return commits_.size();
} else {
return commitsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public com.google.cloud.dataform.v1.CommitLogEntry getCommits(int index) {
if (commitsBuilder_ == null) {
return commits_.get(index);
} else {
return commitsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public Builder setCommits(int index, com.google.cloud.dataform.v1.CommitLogEntry value) {
if (commitsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCommitsIsMutable();
commits_.set(index, value);
onChanged();
} else {
commitsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public Builder setCommits(
int index, com.google.cloud.dataform.v1.CommitLogEntry.Builder builderForValue) {
if (commitsBuilder_ == null) {
ensureCommitsIsMutable();
commits_.set(index, builderForValue.build());
onChanged();
} else {
commitsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public Builder addCommits(com.google.cloud.dataform.v1.CommitLogEntry value) {
if (commitsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCommitsIsMutable();
commits_.add(value);
onChanged();
} else {
commitsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public Builder addCommits(int index, com.google.cloud.dataform.v1.CommitLogEntry value) {
if (commitsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCommitsIsMutable();
commits_.add(index, value);
onChanged();
} else {
commitsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public Builder addCommits(com.google.cloud.dataform.v1.CommitLogEntry.Builder builderForValue) {
if (commitsBuilder_ == null) {
ensureCommitsIsMutable();
commits_.add(builderForValue.build());
onChanged();
} else {
commitsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public Builder addCommits(
int index, com.google.cloud.dataform.v1.CommitLogEntry.Builder builderForValue) {
if (commitsBuilder_ == null) {
ensureCommitsIsMutable();
commits_.add(index, builderForValue.build());
onChanged();
} else {
commitsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public Builder addAllCommits(
java.lang.Iterable<? extends com.google.cloud.dataform.v1.CommitLogEntry> values) {
if (commitsBuilder_ == null) {
ensureCommitsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, commits_);
onChanged();
} else {
commitsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public Builder clearCommits() {
if (commitsBuilder_ == null) {
commits_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
commitsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public Builder removeCommits(int index) {
if (commitsBuilder_ == null) {
ensureCommitsIsMutable();
commits_.remove(index);
onChanged();
} else {
commitsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public com.google.cloud.dataform.v1.CommitLogEntry.Builder getCommitsBuilder(int index) {
return getCommitsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public com.google.cloud.dataform.v1.CommitLogEntryOrBuilder getCommitsOrBuilder(int index) {
if (commitsBuilder_ == null) {
return commits_.get(index);
} else {
return commitsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public java.util.List<? extends com.google.cloud.dataform.v1.CommitLogEntryOrBuilder>
getCommitsOrBuilderList() {
if (commitsBuilder_ != null) {
return commitsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(commits_);
}
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public com.google.cloud.dataform.v1.CommitLogEntry.Builder addCommitsBuilder() {
return getCommitsFieldBuilder()
.addBuilder(com.google.cloud.dataform.v1.CommitLogEntry.getDefaultInstance());
}
/**
*
*
* <pre>
* A list of commit logs, ordered by 'git log' default order.
* </pre>
*
* <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
*/
public com.google.cloud.dataform.v1.CommitLogEntry.Builder addCommitsBuilder(int index) {
return getCommitsFieldBuilder()
.addBuilder(index, com.google.cloud.dataform.v1.CommitLogEntry.getDefaultInstance());
}
/**
 * Returns the list of nested builders for the {@code commits} field, one per
 * element, in 'git log' default order. Forces creation of the repeated-field
 * builder if it does not exist yet.
 *
 * <code>repeated .google.cloud.dataform.v1.CommitLogEntry commits = 1;</code>
 */
public java.util.List<com.google.cloud.dataform.v1.CommitLogEntry.Builder>
    getCommitsBuilderList() {
  return getCommitsFieldBuilder().getBuilderList();
}
// Lazily instantiates the RepeatedFieldBuilderV3 that manages the repeated
// 'commits' field. On first use the current element list is handed over to the
// builder and commits_ is nulled out; from then on all repeated-field
// accessors delegate to commitsBuilder_.
// NOTE(review): the 0x00000001 bit of bitField0_ appears to track whether the
// handed-over list is mutable — confirm against the protobuf codegen docs.
private com.google.protobuf.RepeatedFieldBuilderV3<
        com.google.cloud.dataform.v1.CommitLogEntry,
        com.google.cloud.dataform.v1.CommitLogEntry.Builder,
        com.google.cloud.dataform.v1.CommitLogEntryOrBuilder>
    getCommitsFieldBuilder() {
  if (commitsBuilder_ == null) {
    commitsBuilder_ =
        new com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.dataform.v1.CommitLogEntry,
            com.google.cloud.dataform.v1.CommitLogEntry.Builder,
            com.google.cloud.dataform.v1.CommitLogEntryOrBuilder>(
            commits_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
    // Ownership of the list has moved to the builder; drop the direct reference.
    commits_ = null;
  }
  return commitsBuilder_;
}
// Backing store for next_page_token: holds either a String or a ByteString;
// the getters lazily convert and cache the other representation.
private java.lang.Object nextPageToken_ = "";
/**
 * Returns the pagination token to pass as {@code page_token} to retrieve the
 * next page; empty when there are no subsequent pages.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (!(ref instanceof java.lang.String)) {
    // Stored as a ByteString: decode once and cache the String form.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  } else {
    return (java.lang.String) ref;
  }
}
/**
 * Returns the pagination token as UTF-8 bytes; empty when there are no
 * subsequent pages.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The bytes for nextPageToken.
 */
public com.google.protobuf.ByteString getNextPageTokenBytes() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof String) {
    // Stored as a String: encode once and cache the ByteString form.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    nextPageToken_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}
/**
 * Sets the pagination token ({@code next_page_token = 2}).
 *
 * @param value The nextPageToken to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setNextPageToken(java.lang.String value) {
  if (value == null) {
    throw new NullPointerException();
  }
  nextPageToken_ = value;
  // Mark field #2 as explicitly set.
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
/**
 * Resets {@code next_page_token} to its default value and clears its
 * explicitly-set bit.
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearNextPageToken() {
  // Restore the default value and clear the has-bit for field #2.
  nextPageToken_ = getDefaultInstance().getNextPageToken();
  bitField0_ = (bitField0_ & ~0x00000002);
  onChanged();
  return this;
}
/**
 * Sets the pagination token from raw bytes ({@code next_page_token = 2}).
 * Proto3 string fields must carry valid UTF-8, hence the validation call.
 *
 * @param value The bytes for nextPageToken to set; must not be null.
 * @return This builder for chaining.
 */
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
  if (value == null) {
    throw new NullPointerException();
  }
  checkByteStringIsUtf8(value);
  nextPageToken_ = value;
  // Mark field #2 as explicitly set.
  bitField0_ |= 0x00000002;
  onChanged();
  return this;
}
// Generated passthrough: unknown-field replacement is handled entirely by the
// GeneratedMessageV3.Builder superclass.
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.setUnknownFields(unknownFields);
}
// Generated passthrough: unknown-field merging is handled entirely by the
// GeneratedMessageV3.Builder superclass.
@java.lang.Override
public final Builder mergeUnknownFields(
    final com.google.protobuf.UnknownFieldSet unknownFields) {
  return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dataform.v1.FetchRepositoryHistoryResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.dataform.v1.FetchRepositoryHistoryResponse)
// Shared singleton representing the all-fields-default message; created
// eagerly at class-load time.
private static final com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse DEFAULT_INSTANCE;
static {
  DEFAULT_INSTANCE = new com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse();
}
/** Returns the shared default instance of {@code FetchRepositoryHistoryResponse}. */
public static com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Wire-format parser: parses into a fresh Builder and returns buildPartial().
// Every failure path is normalized to InvalidProtocolBufferException carrying
// the partially-parsed message so callers can inspect what was read.
private static final com.google.protobuf.Parser<FetchRepositoryHistoryResponse> PARSER =
    new com.google.protobuf.AbstractParser<FetchRepositoryHistoryResponse>() {
      @java.lang.Override
      public FetchRepositoryHistoryResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          // Attach whatever was parsed so far before propagating.
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures in the protobuf exception type.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Returns the shared wire-format parser for this message type. */
public static com.google.protobuf.Parser<FetchRepositoryHistoryResponse> parser() {
  return PARSER;
}
// Instance-level accessor mandated by the Message interface; returns the same
// shared parser as parser().
@java.lang.Override
public com.google.protobuf.Parser<FetchRepositoryHistoryResponse> getParserForType() {
  return PARSER;
}
// Instance-level accessor mandated by the Message interface; returns the same
// shared singleton as getDefaultInstance().
@java.lang.Override
public com.google.cloud.dataform.v1.FetchRepositoryHistoryResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/hudi | 37,093 | hudi-flink-datasource/hudi-flink/src/test/java/org/apache/hudi/sink/cluster/ITTestHoodieFlinkClustering.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.sink.cluster;
import org.apache.hudi.adapter.SinkFunctionAdapter;
import org.apache.hudi.avro.model.HoodieClusteringGroup;
import org.apache.hudi.avro.model.HoodieClusteringPlan;
import org.apache.hudi.client.HoodieFlinkWriteClient;
import org.apache.hudi.common.model.HoodieTableType;
import org.apache.hudi.common.model.WriteOperationType;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.timeline.HoodieInstant;
import org.apache.hudi.common.table.timeline.HoodieTimeline;
import org.apache.hudi.common.util.ClusteringUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.configuration.FlinkOptions;
import org.apache.hudi.sink.clustering.ClusteringCommitEvent;
import org.apache.hudi.sink.clustering.ClusteringCommitSink;
import org.apache.hudi.sink.clustering.ClusteringOperator;
import org.apache.hudi.sink.clustering.ClusteringPlanSourceFunction;
import org.apache.hudi.sink.clustering.FlinkClusteringConfig;
import org.apache.hudi.sink.clustering.HoodieFlinkClusteringJob;
import org.apache.hudi.table.HoodieFlinkTable;
import org.apache.hudi.util.AvroSchemaConverter;
import org.apache.hudi.util.CompactionUtil;
import org.apache.hudi.util.FlinkWriteClients;
import org.apache.hudi.util.StreamerUtil;
import org.apache.hudi.utils.FlinkMiniCluster;
import org.apache.hudi.utils.TestConfigurations;
import org.apache.hudi.utils.TestData;
import org.apache.hudi.utils.TestSQL;
import org.apache.avro.Schema;
import org.apache.flink.api.common.RuntimeExecutionMode;
import org.apache.flink.api.common.typeinfo.TypeInformation;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.configuration.ExecutionOptions;
import org.apache.flink.configuration.RestartStrategyOptions;
import org.apache.flink.streaming.api.datastream.DataStream;
import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment;
import org.apache.flink.table.api.DataTypes;
import org.apache.flink.table.api.EnvironmentSettings;
import org.apache.flink.table.api.Table;
import org.apache.flink.table.api.TableEnvironment;
import org.apache.flink.table.api.TableResult;
import org.apache.flink.table.api.ValidationException;
import org.apache.flink.table.api.bridge.java.StreamTableEnvironment;
import org.apache.flink.table.api.config.ExecutionConfigOptions;
import org.apache.flink.table.api.config.TableConfigOptions;
import org.apache.flink.table.api.internal.TableEnvironmentImpl;
import org.apache.flink.table.planner.plan.nodes.exec.utils.ExecNodeUtil;
import org.apache.flink.table.types.DataType;
import org.apache.flink.table.types.logical.RowType;
import org.apache.flink.types.Row;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.extension.ExtendWith;
import org.junit.jupiter.api.io.TempDir;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import java.io.File;
import java.time.Duration;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.TimeUnit;
import java.util.stream.Collectors;
import static org.apache.hudi.common.testutils.HoodieTestUtils.INSTANT_GENERATOR;
import static org.junit.jupiter.api.Assertions.assertDoesNotThrow;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
/**
* IT cases for {@link HoodieFlinkClusteringJob}.
*/
@ExtendWith(FlinkMiniCluster.class)
public class ITTestHoodieFlinkClustering {
// Expected clustered output per partition path; each value mirrors the row
// rendering that TestData.checkWrittenData compares against.
private static final Map<String, String> EXPECTED = new HashMap<>();
static {
  EXPECTED.put("par1", "[id1,par1,id1,Danny,23,1000,par1, id2,par1,id2,Stephen,33,2000,par1]");
  EXPECTED.put("par2", "[id3,par2,id3,Julian,53,3000,par2, id4,par2,id4,Fabian,31,4000,par2]");
  EXPECTED.put("par3", "[id5,par3,id5,Sophia,18,5000,par3, id6,par3,id6,Emma,20,6000,par3]");
  EXPECTED.put("par4", "[id7,par4,id7,Bob,44,7000,par4, id8,par4,id8,Han,56,8000,par4]");
}
// Fresh per-test directory that hosts the Hoodie table under test.
@TempDir
File tempFile;
/**
 * End-to-end clustering test: inserts data, schedules a clustering plan via
 * the write client, transitions the plan to inflight, then runs the
 * clustering pipeline (plan source -> clustering operator -> commit sink) as
 * a standalone Flink job and validates the written data.
 */
@Test
public void testHoodieFlinkClustering() throws Exception {
  // Create hoodie table and insert into data.
  EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
  TableEnvironment tableEnv = TableEnvironmentImpl.create(settings);
  tableEnv.getConfig().getConfiguration()
      .set(ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 4);
  Map<String, String> options = new HashMap<>();
  options.put(FlinkOptions.PATH.key(), tempFile.getAbsolutePath());
  // use append mode
  options.put(FlinkOptions.OPERATION.key(), WriteOperationType.INSERT.value());
  String hoodieTableDDL = TestConfigurations.getCreateHoodieTableDDL("t1", options);
  tableEnv.executeSql(hoodieTableDDL);
  tableEnv.executeSql(TestSQL.INSERT_T1).await();
  // wait for the asynchronous commit to finish
  TimeUnit.SECONDS.sleep(3);
  // Make configuration and setAvroSchema.
  StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
  FlinkClusteringConfig cfg = new FlinkClusteringConfig();
  cfg.path = tempFile.getAbsolutePath();
  cfg.targetPartitions = 4;
  cfg.sortMemory = 256;
  Configuration conf = FlinkClusteringConfig.toFlinkConfig(cfg);
  // the CLI sort-memory setting must round-trip into the Flink config
  assertEquals(256, conf.get(FlinkOptions.WRITE_SORT_MEMORY));
  // create metaClient
  HoodieTableMetaClient metaClient = StreamerUtil.createMetaClient(conf);
  // propagate table name / record key / partition field from the table config
  conf.set(FlinkOptions.TABLE_NAME, metaClient.getTableConfig().getTableName());
  conf.set(FlinkOptions.RECORD_KEY_FIELD, metaClient.getTableConfig().getRecordKeyFieldProp());
  conf.set(FlinkOptions.PARTITION_PATH_FIELD, metaClient.getTableConfig().getPartitionFieldProp());
  long ckpTimeout = env.getCheckpointConfig().getCheckpointTimeout();
  conf.set(FlinkOptions.WRITE_COMMIT_ACK_TIMEOUT, ckpTimeout);
  conf.set(FlinkOptions.PARTITION_PATH_FIELD, "partition");
  // set table schema
  CompactionUtil.setAvroSchema(conf, metaClient);
  // To compute the clustering instant time and do clustering.
  // NOTE: use the wildcard type (not the raw type) for consistency with the
  // other tests and to avoid unchecked warnings.
  try (HoodieFlinkWriteClient<?> writeClient = FlinkWriteClients.createWriteClient(conf)) {
    HoodieFlinkTable<?> table = writeClient.getHoodieTable();
    Option<String> clusteringInstantTime = writeClient.scheduleClustering(Option.empty());
    assertTrue(clusteringInstantTime.isPresent(), "The clustering plan should be scheduled");
    // fetch the requested clustering instant from the reloaded timeline
    table.getMetaClient().reloadActiveTimeline();
    HoodieTimeline timeline = table.getActiveTimeline().filterPendingClusteringTimeline()
        .filter(instant -> instant.getState() == HoodieInstant.State.REQUESTED);
    Option<Pair<HoodieInstant, HoodieClusteringPlan>> clusteringPlanOption = ClusteringUtils.getClusteringPlan(
        table.getMetaClient(), timeline.lastInstant().get());
    HoodieClusteringPlan clusteringPlan = clusteringPlanOption.get().getRight();
    // Mark instant as clustering inflight
    HoodieInstant instant = INSTANT_GENERATOR.getClusteringCommitRequestedInstant(clusteringInstantTime.get());
    table.getActiveTimeline().transitionClusterRequestedToInflight(instant, Option.empty());
    final Schema tableAvroSchema = StreamerUtil.getTableAvroSchema(table.getMetaClient(), false);
    final DataType rowDataType = AvroSchemaConverter.convertToDataType(tableAvroSchema);
    final RowType rowType = (RowType) rowDataType.getLogicalType();
    // plan source -> clustering operator (one subtask per input group)
    DataStream<ClusteringCommitEvent> dataStream = env.addSource(new ClusteringPlanSourceFunction(clusteringInstantTime.get(), clusteringPlan, conf))
        .name("clustering_source")
        .uid("uid_clustering_source")
        .rebalance()
        .transform("clustering_task",
            TypeInformation.of(ClusteringCommitEvent.class),
            new ClusteringOperator(conf, rowType))
        .setParallelism(clusteringPlan.getInputGroups().size());
    // reserve managed memory for the sort performed by the clustering operator
    ExecNodeUtil.setManagedMemoryWeight(dataStream.getTransformation(),
        conf.get(FlinkOptions.WRITE_SORT_MEMORY) * 1024L * 1024L);
    dataStream
        .addSink(new ClusteringCommitSink(conf))
        .name("clustering_commit")
        .uid("uid_clustering_commit")
        .setParallelism(1);
    env.execute("flink_hudi_clustering");
    TestData.checkWrittenData(tempFile, EXPECTED, 4);
  }
}
/**
 * Verifies the standalone async clustering service: after inserting data,
 * {@code HoodieFlinkClusteringJob.AsyncClusteringService} is started in
 * schedule mode, given a few seconds to schedule and execute clustering,
 * then shut down, and the clustered output is validated.
 */
@Test
public void testHoodieFlinkClusteringService() throws Exception {
  // Create hoodie table and insert into data.
  EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
  TableEnvironment tableEnv = TableEnvironmentImpl.create(settings);
  tableEnv.getConfig().getConfiguration()
      .set(ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 4);
  Map<String, String> options = new HashMap<>();
  options.put(FlinkOptions.PATH.key(), tempFile.getAbsolutePath());
  // use append mode
  options.put(FlinkOptions.OPERATION.key(), WriteOperationType.INSERT.value());
  String hoodieTableDDL = TestConfigurations.getCreateHoodieTableDDL("t1", options);
  tableEnv.executeSql(hoodieTableDDL);
  tableEnv.executeSql(TestSQL.INSERT_T1).await();
  // wait for the asynchronous commit to finish
  TimeUnit.SECONDS.sleep(3);
  // Make configuration and setAvroSchema.
  FlinkClusteringConfig cfg = new FlinkClusteringConfig();
  cfg.path = tempFile.getAbsolutePath();
  // minimum interval (seconds) between clustering scheduling attempts
  cfg.minClusteringIntervalSeconds = 3;
  cfg.schedule = true;
  Configuration conf = FlinkClusteringConfig.toFlinkConfig(cfg);
  HoodieFlinkClusteringJob.AsyncClusteringService asyncClusteringService = new HoodieFlinkClusteringJob.AsyncClusteringService(cfg, conf);
  asyncClusteringService.start(null);
  // give the background service time to schedule and run the clustering
  TimeUnit.SECONDS.sleep(5);
  asyncClusteringService.shutDown();
  TestData.checkWrittenData(tempFile, EXPECTED, 4);
}
/**
 * Verifies that clustering scheduling honors
 * {@code FlinkOptions.CLUSTERING_DELTA_COMMITS}: with the threshold set to
 * 2, no plan is scheduled after a single delta commit, and a plan is
 * scheduled once a second commit lands.
 */
@Test
public void testHoodieFlinkClusteringSchedule() throws Exception {
  // Create hoodie table and insert into data.
  EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
  TableEnvironment tableEnv = TableEnvironmentImpl.create(settings);
  Map<String, String> options = new HashMap<>();
  options.put(FlinkOptions.PATH.key(), tempFile.getAbsolutePath());
  // use append mode
  options.put(FlinkOptions.OPERATION.key(), WriteOperationType.INSERT.value());
  String hoodieTableDDL = TestConfigurations.getCreateHoodieTableDDL("t1", options);
  tableEnv.executeSql(hoodieTableDDL);
  tableEnv.executeSql(TestSQL.INSERT_T1).await();
  // wait for the asynchronous commit to finish
  TimeUnit.SECONDS.sleep(3);
  // Make configuration and setAvroSchema.
  StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
  FlinkClusteringConfig cfg = new FlinkClusteringConfig();
  cfg.path = tempFile.getAbsolutePath();
  Configuration conf = FlinkClusteringConfig.toFlinkConfig(cfg);
  // create metaClient
  HoodieTableMetaClient metaClient = StreamerUtil.createMetaClient(conf);
  // propagate table name / record key / partition field from the table config
  conf.set(FlinkOptions.TABLE_NAME, metaClient.getTableConfig().getTableName());
  conf.set(FlinkOptions.RECORD_KEY_FIELD, metaClient.getTableConfig().getRecordKeyFieldProp());
  conf.set(FlinkOptions.PARTITION_PATH_FIELD, metaClient.getTableConfig().getPartitionFieldProp());
  long ckpTimeout = env.getCheckpointConfig().getCheckpointTimeout();
  conf.set(FlinkOptions.WRITE_COMMIT_ACK_TIMEOUT, ckpTimeout);
  conf.set(FlinkOptions.PARTITION_PATH_FIELD, "partition");
  // schedule-only mode: require 2 delta commits before a plan may be scheduled
  conf.set(FlinkOptions.CLUSTERING_DELTA_COMMITS, 2);
  conf.set(FlinkOptions.CLUSTERING_ASYNC_ENABLED, false);
  conf.set(FlinkOptions.CLUSTERING_SCHEDULE_ENABLED, true);
  // set table schema
  CompactionUtil.setAvroSchema(conf, metaClient);
  // NOTE: wildcard type instead of the raw HoodieFlinkWriteClient for
  // consistency with the other tests and to avoid unchecked warnings.
  try (HoodieFlinkWriteClient<?> writeClient = FlinkWriteClients.createWriteClient(conf)) {
    // To compute the clustering instant time.
    Option<String> clusteringInstantTime = writeClient.scheduleClustering(Option.empty());
    assertFalse(clusteringInstantTime.isPresent(), "1 delta commit, the clustering plan should not be scheduled");
    tableEnv.executeSql(TestSQL.INSERT_T1).await();
    // wait for the asynchronous commit to finish
    TimeUnit.SECONDS.sleep(3);
    clusteringInstantTime = writeClient.scheduleClustering(Option.empty());
    assertTrue(clusteringInstantTime.isPresent(), "2 delta commits, the clustering plan should be scheduled");
  }
}
/**
 * Reproduces the schedule-after-archive scenario: a first clustering plan is
 * executed but its commit is discarded (left pending), the earliest commit is
 * archived, and a second clustering plan is then scheduled. The second plan
 * must not include file slices produced by the still-pending first
 * clustering.
 */
@Test
public void testHoodieFlinkClusteringScheduleAfterArchive() throws Exception {
  // Create hoodie table and insert into data.
  EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
  TableEnvironment tableEnv = TableEnvironmentImpl.create(settings);
  tableEnv.getConfig().getConfiguration()
      .set(ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 1);
  Map<String, String> options = new HashMap<>();
  options.put(FlinkOptions.HIVE_STYLE_PARTITIONING.key(), "false");
  options.put(FlinkOptions.PATH.key(), tempFile.getAbsolutePath());
  // use append mode
  options.put(FlinkOptions.OPERATION.key(), WriteOperationType.INSERT.value());
  String hoodieTableDDL = TestConfigurations.getCreateHoodieTableDDL("t1", options);
  tableEnv.executeSql(hoodieTableDDL);
  // two separate commits so there is something to archive later
  tableEnv.executeSql(TestSQL.INSERT_T1).await();
  tableEnv.executeSql(TestSQL.INSERT_T1).await();
  // wait for the asynchronous commit to finish
  TimeUnit.SECONDS.sleep(3);
  // Make configuration and setAvroSchema.
  StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
  FlinkClusteringConfig cfg = new FlinkClusteringConfig();
  cfg.path = tempFile.getAbsolutePath();
  cfg.targetPartitions = 4;
  Configuration conf = FlinkClusteringConfig.toFlinkConfig(cfg);
  // create metaClient
  HoodieTableMetaClient metaClient = StreamerUtil.createMetaClient(conf);
  // set the table name
  conf.set(FlinkOptions.TABLE_NAME, metaClient.getTableConfig().getTableName());
  // set record key field
  conf.set(FlinkOptions.RECORD_KEY_FIELD, metaClient.getTableConfig().getRecordKeyFieldProp());
  // set partition field
  conf.set(FlinkOptions.PARTITION_PATH_FIELD, metaClient.getTableConfig().getPartitionFieldProp());
  long ckpTimeout = env.getCheckpointConfig().getCheckpointTimeout();
  conf.set(FlinkOptions.WRITE_COMMIT_ACK_TIMEOUT, ckpTimeout);
  conf.set(FlinkOptions.PARTITION_PATH_FIELD, "partition");
  // aggressive archive/clean settings so the first commit becomes archivable
  conf.set(FlinkOptions.ARCHIVE_MAX_COMMITS, 2);
  conf.set(FlinkOptions.ARCHIVE_MIN_COMMITS, 1);
  conf.set(FlinkOptions.CLEAN_RETAIN_COMMITS, 0);
  // set table schema
  CompactionUtil.setAvroSchema(conf, metaClient);
  // To compute the clustering instant time and do clustering.
  try (HoodieFlinkWriteClient<?> writeClient = FlinkWriteClients.createWriteClient(conf)) {
    HoodieFlinkTable<?> table = writeClient.getHoodieTable();
    Option<String> firstClusteringInstant = writeClient.scheduleClustering(Option.empty());
    assertTrue(firstClusteringInstant.isPresent(), "The clustering plan should be scheduled");
    // fetch the requested clustering instant from the reloaded timeline
    table.getMetaClient().reloadActiveTimeline();
    HoodieTimeline timeline = table.getActiveTimeline().filterPendingClusteringTimeline()
        .filter(i -> i.getState() == HoodieInstant.State.REQUESTED);
    Option<Pair<HoodieInstant, HoodieClusteringPlan>> clusteringPlanOption = ClusteringUtils.getClusteringPlan(
        table.getMetaClient(), timeline.lastInstant().get());
    HoodieClusteringPlan clusteringPlan = clusteringPlanOption.get().getRight();
    // Mark instant as clustering inflight
    HoodieInstant instant = INSTANT_GENERATOR.getClusteringCommitRequestedInstant(firstClusteringInstant.get());
    table.getActiveTimeline().transitionClusterRequestedToInflight(instant, Option.empty());
    final Schema tableAvroSchema = StreamerUtil.getTableAvroSchema(table.getMetaClient(), false);
    final DataType rowDataType = AvroSchemaConverter.convertToDataType(tableAvroSchema);
    final RowType rowType = (RowType) rowDataType.getLogicalType();
    DataStream<ClusteringCommitEvent> dataStream =
        env.addSource(new ClusteringPlanSourceFunction(firstClusteringInstant.get(), clusteringPlan, conf))
            .name("clustering_source")
            .uid("uid_clustering_source")
            .rebalance()
            .transform(
                "clustering_task",
                TypeInformation.of(ClusteringCommitEvent.class),
                new ClusteringOperator(conf, rowType))
            .setParallelism(clusteringPlan.getInputGroups().size());
    ExecNodeUtil.setManagedMemoryWeight(
        dataStream.getTransformation(),
        conf.get(FlinkOptions.WRITE_SORT_MEMORY) * 1024L * 1024L);
    // keep pending clustering, not committing clustering: commit events are
    // discarded so the first clustering stays inflight
    dataStream
        .addSink(new DiscardingSink<>())
        .name("discarding-sink")
        .uid("uid_discarding-sink")
        .setParallelism(1);
    env.execute("flink_hudi_clustering");
    tableEnv.executeSql(TestSQL.INSERT_T1).await();
    // wait for the asynchronous commit to finish
    TimeUnit.SECONDS.sleep(3);
    // archive the first commit, retain the second commit before the inflight cluster commit
    writeClient.archive();
    assertTrue(writeClient.scheduleClustering(Option.empty()).isPresent(), "The clustering plan should be scheduled");
    table.getMetaClient().reloadActiveTimeline();
    timeline = table.getActiveTimeline().filterPendingClusteringTimeline()
        .filter(i -> i.getState() == HoodieInstant.State.REQUESTED);
    HoodieInstant secondClusteringInstant = timeline.lastInstant().get();
    List<HoodieClusteringGroup> inputFileGroups = ClusteringUtils.getClusteringPlan(table.getMetaClient(), secondClusteringInstant).get().getRight().getInputGroups();
    // clustering plan has no previous file slice generated by previous pending clustering
    assertFalse(inputFileGroups
        .stream().anyMatch(fg -> fg.getSlices()
            .stream().anyMatch(s -> s.getDataFilePath().contains(firstClusteringInstant.get()))));
  }
}
/**
 * Ensures that inserting into a table whose precombine column is
 * TIMESTAMP(9) (nanoseconds) fails with a {@link ValidationException}:
 * Avro only supports timestamp precisions up to microseconds (6).
 */
@Test
public void testHoodieFlinkClusteringWithTimestampNanos() {
  // create hoodie table and insert into data
  EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
  TableEnvironment tableEnv = TableEnvironmentImpl.create(settings);
  tableEnv.getConfig().getConfiguration()
      .set(ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 4);
  Map<String, String> options = new HashMap<>();
  options.put(FlinkOptions.PATH.key(), tempFile.getAbsolutePath());
  // use append mode
  options.put(FlinkOptions.OPERATION.key(), WriteOperationType.INSERT.value());
  options.put(FlinkOptions.INSERT_CLUSTER.key(), "false");
  // row schema with a nanosecond-precision timestamp column
  final DataType dataType = DataTypes.ROW(
      DataTypes.FIELD("uuid", DataTypes.VARCHAR(20)), // record key
      DataTypes.FIELD("name", DataTypes.VARCHAR(10)),
      DataTypes.FIELD("age", DataTypes.INT()),
      DataTypes.FIELD("ts", DataTypes.TIMESTAMP(9)), // precombine field
      DataTypes.FIELD("partition", DataTypes.VARCHAR(10)))
      .notNull();
  final RowType rowType = (RowType) dataType.getLogicalType();
  final List<String> fields = rowType.getFields().stream()
      .map(RowType.RowField::asSummaryString).collect(Collectors.toList());
  String hoodieTableDDL = TestConfigurations.getCreateHoodieTableDDL(
      "t1", fields, options, true, "uuid", "partition");
  // the DDL itself succeeds; only the write is expected to fail
  tableEnv.executeSql(hoodieTableDDL);
  // insert rows with nanosecond-precision timestamps; TIMESTAMP(9)
  final String insertSql = "insert into t1 values\n"
      + "('id1','Danny',23,TIMESTAMP '1970-01-01 00:00:01.100001001','par1'),\n"
      + "('id2','Stephen',33,TIMESTAMP '1970-01-01 00:00:02.100001001','par1'),\n"
      + "('id3','Julian',53,TIMESTAMP '1970-01-01 00:00:03.100001001','par2'),\n"
      + "('id4','Fabian',31,TIMESTAMP '1970-01-01 00:00:04.100001001','par2'),\n"
      + "('id5','Sophia',18,TIMESTAMP '1970-01-01 00:00:05.100001001','par3'),\n"
      + "('id6','Emma',20,TIMESTAMP '1970-01-01 00:00:06.100001001','par3'),\n"
      + "('id7','Bob',44,TIMESTAMP '1970-01-01 00:00:07.100001001','par4'),\n"
      + "('id8','Han',56,TIMESTAMP '1970-01-01 00:00:08.100001001','par4')";
  assertThrows(ValidationException.class, () -> tableEnv.executeSql(insertSql),
      "Avro does not support TIMESTAMP type with precision: 9, it only support precisions <= 6.");
}
/**
 * Happy-path clustering for a table whose precombine column is TIMESTAMP(6)
 * (microseconds): inserts microsecond-precision rows, runs
 * {@code runCluster(RowType)}, and verifies the clustered output (the ts
 * column is rendered as epoch microseconds, e.g. 1100001).
 */
@Test
public void testHoodieFlinkClusteringWithTimestampMicros() throws Exception {
  // create hoodie table and insert into data
  EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
  TableEnvironment tableEnv = TableEnvironmentImpl.create(settings);
  tableEnv.getConfig().getConfiguration()
      .set(ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 4);
  Map<String, String> options = new HashMap<>();
  options.put(FlinkOptions.PATH.key(), tempFile.getAbsolutePath());
  // use append mode
  options.put(FlinkOptions.OPERATION.key(), WriteOperationType.INSERT.value());
  // row schema
  final DataType dataType = DataTypes.ROW(
      DataTypes.FIELD("uuid", DataTypes.VARCHAR(20)),// record key
      DataTypes.FIELD("name", DataTypes.VARCHAR(10)),
      DataTypes.FIELD("age", DataTypes.INT()),
      DataTypes.FIELD("ts", DataTypes.TIMESTAMP(6)), // precombine field
      DataTypes.FIELD("partition", DataTypes.VARCHAR(10)))
      .notNull();
  final RowType rowType = (RowType) dataType.getLogicalType();
  final List<String> fields = rowType.getFields().stream()
      .map(RowType.RowField::asSummaryString).collect(Collectors.toList());
  String hoodieTableDDL = TestConfigurations.getCreateHoodieTableDDL(
      "t1", fields, options, true, "uuid", "partition");
  tableEnv.executeSql(hoodieTableDDL);
  // insert rows with timestamp of microseconds precision; timestamp(6)
  final String insertSql = "insert into t1 values\n"
      + "('id1','Danny',23,TIMESTAMP '1970-01-01 00:00:01.100001','par1'),\n"
      + "('id2','Stephen',33,TIMESTAMP '1970-01-01 00:00:02.100001','par1'),\n"
      + "('id3','Julian',53,TIMESTAMP '1970-01-01 00:00:03.100001','par2'),\n"
      + "('id4','Fabian',31,TIMESTAMP '1970-01-01 00:00:04.100001','par2'),\n"
      + "('id5','Sophia',18,TIMESTAMP '1970-01-01 00:00:05.100001','par3'),\n"
      + "('id6','Emma',20,TIMESTAMP '1970-01-01 00:00:06.100001','par3'),\n"
      + "('id7','Bob',44,TIMESTAMP '1970-01-01 00:00:07.100001','par4'),\n"
      + "('id8','Han',56,TIMESTAMP '1970-01-01 00:00:08.100001','par4')";
  tableEnv.executeSql(insertSql).await();
  // wait for the asynchronous commit to finish
  TimeUnit.SECONDS.sleep(3);
  runCluster(rowType);
  // test output
  final Map<String, String> expected = new HashMap<>();
  expected.put("par1", "[id1,par1,id1,Danny,23,1100001,par1, id2,par1,id2,Stephen,33,2100001,par1]");
  expected.put("par2", "[id3,par2,id3,Julian,53,3100001,par2, id4,par2,id4,Fabian,31,4100001,par2]");
  expected.put("par3", "[id5,par3,id5,Sophia,18,5100001,par3, id6,par3,id6,Emma,20,6100001,par3]");
  expected.put("par4", "[id7,par4,id7,Bob,44,7100001,par4, id8,par4,id8,Han,56,8100001,par4]");
  TestData.checkWrittenData(tempFile, expected, 4);
}
/**
 * Verifies that clustering succeeds regardless of whether the table was
 * created with a primary key constraint: without a PK the record key column
 * is nullable in the table schema, while the clustering row type always
 * declares it not-null — the two must still interoperate.
 *
 * @param withPk whether the DDL declares {@code primary key (uuid)}
 */
@ParameterizedTest
@ValueSource(booleans = {true, false})
public void testInsertWithDifferentRecordKeyNullabilityAndClustering(boolean withPk) throws Exception {
  EnvironmentSettings settings = EnvironmentSettings.newInstance().inBatchMode().build();
  TableEnvironment tableEnv = TableEnvironmentImpl.create(settings);
  tableEnv.getConfig().getConfiguration()
      .set(ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 4);
  // if create a table without primary key, the nullability of the record key field is nullable
  // otherwise, the nullability is not nullable.
  String pkConstraint = withPk ? ", primary key (uuid) not enforced\n" : "";
  // renamed from 'tblWithoutPkDDL': the DDL may or may not carry a PK
  String createTableDDL = "create table t1(\n"
      + " `uuid` VARCHAR(20)\n"
      + ", `name` VARCHAR(10)\n"
      + ", `age` INT\n"
      + ", `ts` TIMESTAMP(3)\n"
      + ", `partition` VARCHAR(10)\n"
      + pkConstraint
      + ")\n"
      + "PARTITIONED BY (`partition`)\n"
      + "with (\n"
      + " 'connector' = 'hudi',\n"
      + " 'hoodie.datasource.write.recordkey.field' = 'uuid',\n"
      + " 'path' = '" + tempFile.getAbsolutePath() + "'\n"
      + ")";
  tableEnv.executeSql(createTableDDL);
  tableEnv.executeSql(TestSQL.INSERT_T1).await();
  final RowType rowType = (RowType) DataTypes.ROW(
      DataTypes.FIELD("uuid", DataTypes.VARCHAR(20).notNull()), // primary key set as not null
      DataTypes.FIELD("name", DataTypes.VARCHAR(10)),
      DataTypes.FIELD("age", DataTypes.INT()),
      DataTypes.FIELD("ts", DataTypes.TIMESTAMP(3)),
      DataTypes.FIELD("partition", DataTypes.VARCHAR(10)))
      .notNull().getLogicalType();
  // run cluster with row type
  runCluster(rowType);
  final Map<String, String> expected = new HashMap<>();
  expected.put("par1", "[id1,par1,id1,Danny,23,1000,par1, id2,par1,id2,Stephen,33,2000,par1]");
  expected.put("par2", "[id3,par2,id3,Julian,53,3000,par2, id4,par2,id4,Fabian,31,4000,par2]");
  expected.put("par3", "[id5,par3,id5,Sophia,18,5000,par3, id6,par3,id6,Emma,20,6000,par3]");
  expected.put("par4", "[id7,par4,id7,Bob,44,7000,par4, id8,par4,id8,Han,56,8000,par4]");
  TestData.checkWrittenData(tempFile, expected, 4);
}
/**
 * Runs an offline clustering pass after committed writes and asserts that it
 * completes without throwing and that all 16 rows remain readable.
 * NOTE(review): relies on {@code prepareEnvAndTable()} and
 * {@code runOfflineCluster(...)} defined elsewhere in this class; the 16-row
 * expectation presumably comes from two 8-row inserts performed by
 * prepareEnvAndTable() — confirm against its definition.
 */
@Test
public void testOfflineClusterFailoverAfterCommit() throws Exception {
  StreamTableEnvironment tableEnv = prepareEnvAndTable();
  FlinkClusteringConfig cfg = new FlinkClusteringConfig();
  cfg.path = tempFile.getAbsolutePath();
  cfg.targetPartitions = 4;
  Configuration conf = FlinkClusteringConfig.toFlinkConfig(cfg);
  assertDoesNotThrow(() -> runOfflineCluster(tableEnv, conf));
  Table result = tableEnv.sqlQuery("select count(*) from t1");
  assertEquals(16L, tableEnv.toDataStream(result, Row.class).executeAndCollect(1).get(0).getField(0));
}
  /**
   * Schedules a clustering plan for the table under {@code tempFile} and executes
   * the clustering pipeline: plan source -> clustering operator -> commit sink.
   *
   * @param rowType physical row type handed to the {@link ClusteringOperator}
   */
  private void runCluster(RowType rowType) throws Exception {
    // make configuration and setAvroSchema.
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment();
    FlinkClusteringConfig cfg = new FlinkClusteringConfig();
    cfg.path = tempFile.getAbsolutePath();
    cfg.targetPartitions = 4;
    Configuration conf = FlinkClusteringConfig.toFlinkConfig(cfg);
    // create metaClient
    HoodieTableMetaClient metaClient = StreamerUtil.createMetaClient(conf);
    // set the table name
    conf.set(FlinkOptions.TABLE_NAME, metaClient.getTableConfig().getTableName());
    // set record key field
    conf.set(FlinkOptions.RECORD_KEY_FIELD, metaClient.getTableConfig().getRecordKeyFieldProp());
    // set partition field
    conf.set(FlinkOptions.PARTITION_PATH_FIELD, metaClient.getTableConfig().getPartitionFieldProp());
    long ckpTimeout = env.getCheckpointConfig().getCheckpointTimeout();
    conf.set(FlinkOptions.WRITE_COMMIT_ACK_TIMEOUT, ckpTimeout);
    // NOTE(review): overwrites the partition field read from table config just above — confirm intended.
    conf.set(FlinkOptions.PARTITION_PATH_FIELD, "partition");
    // set table schema
    CompactionUtil.setAvroSchema(conf, metaClient);
    // judge whether have operation
    // To compute the clustering instant time and do clustering.
    try (HoodieFlinkWriteClient writeClient = FlinkWriteClients.createWriteClient(conf)) {
      HoodieFlinkTable<?> table = writeClient.getHoodieTable();
      Option<String> clusteringInstantTime = writeClient.scheduleClustering(Option.empty());
      assertTrue(clusteringInstantTime.isPresent(), "The clustering plan should be scheduled");
      // fetch the instant based on the configured execution sequence
      table.getMetaClient().reloadActiveTimeline();
      HoodieTimeline timeline = table.getActiveTimeline().filterPendingClusteringTimeline()
          .filter(instant -> instant.getState() == HoodieInstant.State.REQUESTED);
      // generate clustering plan
      // should support configurable commit metadata
      Option<Pair<HoodieInstant, HoodieClusteringPlan>> clusteringPlanOption = ClusteringUtils.getClusteringPlan(
          table.getMetaClient(), timeline.lastInstant().get());
      HoodieClusteringPlan clusteringPlan = clusteringPlanOption.get().getRight();
      // Mark instant as clustering inflight
      HoodieInstant instant = INSTANT_GENERATOR.getClusteringCommitRequestedInstant(clusteringInstantTime.get());
      table.getActiveTimeline().transitionClusterRequestedToInflight(instant, Option.empty());
      // One clustering sub-task per input file group in the plan.
      DataStream<ClusteringCommitEvent> dataStream = env.addSource(new ClusteringPlanSourceFunction(clusteringInstantTime.get(), clusteringPlan, conf))
          .name("clustering_source")
          .uid("uid_clustering_source")
          .rebalance()
          .transform("clustering_task",
              TypeInformation.of(ClusteringCommitEvent.class),
              new ClusteringOperator(conf, rowType))
          .setParallelism(clusteringPlan.getInputGroups().size());
      // Reserve managed memory for the sort buffers used while clustering (MB -> bytes).
      ExecNodeUtil.setManagedMemoryWeight(dataStream.getTransformation(),
          conf.get(FlinkOptions.WRITE_SORT_MEMORY) * 1024L * 1024L);
      dataStream
          .addSink(new ClusteringCommitSink(conf))
          .name("clustering_commit")
          .uid("uid_clustering_commit")
          .setParallelism(1);
      env.execute("flink_hudi_clustering");
    }
  }
private StreamTableEnvironment prepareEnvAndTable() {
// Create hoodie table and insert into data.
Configuration conf = new org.apache.flink.configuration.Configuration();
conf.set(ExecutionOptions.RUNTIME_MODE, RuntimeExecutionMode.BATCH);
StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(conf);
StreamTableEnvironment tEnv = StreamTableEnvironment.create(env);
tEnv.getConfig().getConfiguration().set(ExecutionConfigOptions.TABLE_EXEC_RESOURCE_DEFAULT_PARALLELISM, 4);
tEnv.getConfig().getConfiguration().set(TableConfigOptions.TABLE_DML_SYNC, true);
Map<String, String> options = new HashMap<>();
options.put(FlinkOptions.PATH.key(), tempFile.getAbsolutePath());
// use append mode
options.put(FlinkOptions.OPERATION.key(), WriteOperationType.INSERT.value());
options.put(FlinkOptions.INSERT_CLUSTER.key(), "false");
options.put(FlinkOptions.TABLE_TYPE.key(), HoodieTableType.COPY_ON_WRITE.name());
String hoodieTableDDL = TestConfigurations.getCreateHoodieTableDDL("t1", options);
tEnv.executeSql(hoodieTableDDL);
tEnv.executeSql(TestSQL.INSERT_T1);
return tEnv;
}
  /**
   * Schedules a clustering plan, inserts another batch while the plan is pending,
   * then executes the clustering pipeline. The job runs with a fixed-delay restart
   * strategy (1 attempt) and commits through {@link ClusteringCommitTestSink},
   * allowing failover-after-commit behaviour to be exercised by the caller.
   */
  private void runOfflineCluster(TableEnvironment tableEnv, Configuration conf) throws Exception {
    // Make configuration and setAvroSchema.
    Configuration envConf = new Configuration();
    envConf.set(RestartStrategyOptions.RESTART_STRATEGY, "fixed-delay");
    envConf.set(RestartStrategyOptions.RESTART_STRATEGY_FIXED_DELAY_ATTEMPTS, 1);
    envConf.set(RestartStrategyOptions.RESTART_STRATEGY_FIXED_DELAY_DELAY, Duration.ofMillis(1));
    StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(envConf);
    // create metaClient
    HoodieTableMetaClient metaClient = StreamerUtil.createMetaClient(conf);
    // set the table name
    conf.set(FlinkOptions.TABLE_NAME, metaClient.getTableConfig().getTableName());
    // set record key field
    conf.set(FlinkOptions.RECORD_KEY_FIELD, metaClient.getTableConfig().getRecordKeyFieldProp());
    // set partition field
    conf.set(FlinkOptions.PARTITION_PATH_FIELD, metaClient.getTableConfig().getPartitionFieldProp());
    long ckpTimeout = env.getCheckpointConfig().getCheckpointTimeout();
    conf.set(FlinkOptions.WRITE_COMMIT_ACK_TIMEOUT, ckpTimeout);
    // NOTE(review): overwrites the partition field read from table config just above — confirm intended.
    conf.set(FlinkOptions.PARTITION_PATH_FIELD, "partition");
    // set table schema
    CompactionUtil.setAvroSchema(conf, metaClient);
    // judge whether have operation
    // To compute the clustering instant time and do clustering.
    try (HoodieFlinkWriteClient writeClient = FlinkWriteClients.createWriteClient(conf)) {
      HoodieFlinkTable<?> table = writeClient.getHoodieTable();
      Option<String> clusteringInstantTime = writeClient.scheduleClustering(Option.empty());
      assertTrue(clusteringInstantTime.isPresent(), "The clustering plan should be scheduled");
      // second batch lands while the clustering instant is still pending
      tableEnv.executeSql(TestSQL.INSERT_T1);
      // fetch the instant based on the configured execution sequence
      table.getMetaClient().reloadActiveTimeline();
      HoodieTimeline timeline = table.getActiveTimeline().filterPendingClusteringTimeline()
          .filter(instant -> instant.getState() == HoodieInstant.State.REQUESTED);
      // generate clustering plan
      // should support configurable commit metadata
      Option<Pair<HoodieInstant, HoodieClusteringPlan>> clusteringPlanOption = ClusteringUtils.getClusteringPlan(
          table.getMetaClient(), timeline.lastInstant().get());
      HoodieClusteringPlan clusteringPlan = clusteringPlanOption.get().getRight();
      // Mark instant as clustering inflight
      HoodieInstant instant = INSTANT_GENERATOR.getClusteringCommitRequestedInstant(clusteringInstantTime.get());
      table.getActiveTimeline().transitionClusterRequestedToInflight(instant, Option.empty());
      // Derive the row type from the table's Avro schema rather than taking it as a parameter.
      final Schema tableAvroSchema = StreamerUtil.getTableAvroSchema(table.getMetaClient(), false);
      final DataType rowDataType = AvroSchemaConverter.convertToDataType(tableAvroSchema);
      final RowType rowType = (RowType) rowDataType.getLogicalType();
      DataStream<ClusteringCommitEvent> dataStream = env.addSource(new ClusteringPlanSourceFunction(clusteringInstantTime.get(), clusteringPlan, conf))
          .name("clustering_source")
          .uid("uid_clustering_source")
          .rebalance()
          .transform("clustering_task",
              TypeInformation.of(ClusteringCommitEvent.class),
              new ClusteringOperator(conf, rowType))
          .setParallelism(clusteringPlan.getInputGroups().size());
      // Reserve managed memory for the sort buffers used while clustering (MB -> bytes).
      ExecNodeUtil.setManagedMemoryWeight(dataStream.getTransformation(),
          conf.get(FlinkOptions.WRITE_SORT_MEMORY) * 1024L * 1024L);
      dataStream
          .addSink(new ClusteringCommitTestSink(conf))
          .name("clustering_commit")
          .uid("uid_clustering_commit")
          .setParallelism(1);
      env.execute("flink_hudi_clustering");
    }
  }
// -------------------------------------------------------------------------
// Utilities
// -------------------------------------------------------------------------
  /**
   * A sink that silently drops every record; useful where a pipeline needs a
   * terminal sink but the emitted output is irrelevant to the test.
   */
  private static final class DiscardingSink<T> implements SinkFunctionAdapter<T> {
    private static final long serialVersionUID = 1L;
    @Override
    public void invoke(T value) {
      // do nothing
    }
  }
}
|
apache/geode | 36,944 | geode-core/src/distributedTest/java/org/apache/geode/internal/cache/ha/HARegionQueueDUnitTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more contributor license
* agreements. See the NOTICE file distributed with this work for additional information regarding
* copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance with the License. You may obtain a
* copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software distributed under the License
* is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
* or implied. See the License for the specific language governing permissions and limitations under
* the License.
*/
package org.apache.geode.internal.cache.ha;
import static org.apache.geode.cache.Region.SEPARATOR;
import static org.apache.geode.internal.cache.ha.HARegionQueue.NON_BLOCKING_HA_QUEUE;
import static org.apache.geode.internal.cache.ha.HARegionQueue.getHARegionQueueInstance;
import static org.apache.geode.internal.statistics.StatisticsClockFactory.disabledClock;
import static org.apache.geode.test.awaitility.GeodeAwaitility.await;
import static org.apache.geode.test.dunit.Assert.assertEquals;
import static org.apache.geode.test.dunit.Assert.assertNotNull;
import static org.apache.geode.test.dunit.Assert.assertNull;
import static org.apache.geode.test.dunit.Assert.assertTrue;
import static org.apache.geode.test.dunit.Assert.fail;
import static org.apache.geode.test.dunit.ThreadUtils.join;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.Set;
import org.junit.Ignore;
import org.junit.Test;
import org.junit.experimental.categories.Category;
import org.mockito.AdditionalAnswers;
import org.apache.geode.LogWriter;
import org.apache.geode.cache.AttributesFactory;
import org.apache.geode.cache.CacheException;
import org.apache.geode.cache.CacheFactory;
import org.apache.geode.cache.DataPolicy;
import org.apache.geode.cache.EntryEvent;
import org.apache.geode.cache.Region;
import org.apache.geode.cache.Scope;
import org.apache.geode.cache.util.CacheListenerAdapter;
import org.apache.geode.cache30.CacheSerializableRunnable;
import org.apache.geode.distributed.DistributedSystem;
import org.apache.geode.internal.cache.Conflatable;
import org.apache.geode.internal.cache.EntryEventImpl;
import org.apache.geode.internal.cache.EventID;
import org.apache.geode.internal.cache.HARegion;
import org.apache.geode.internal.cache.InternalCache;
import org.apache.geode.test.awaitility.GeodeAwaitility;
import org.apache.geode.test.dunit.Host;
import org.apache.geode.test.dunit.SerializableCallable;
import org.apache.geode.test.dunit.ThreadUtils;
import org.apache.geode.test.dunit.VM;
import org.apache.geode.test.dunit.WaitCriterion;
import org.apache.geode.test.dunit.internal.JUnit4DistributedTestCase;
import org.apache.geode.test.junit.categories.ClientSubscriptionTest;
@Category({ClientSubscriptionTest.class})
public class HARegionQueueDUnitTest extends JUnit4DistributedTestCase {
  // While true, the cache listeners installed in each VM feed region events into
  // the HARegionQueue; toggled to false to tell worker threads to wind down.
  private static volatile boolean toCnt = true;
  // Assigned by the queue-creation runnable in testNPEDueToHARegionQueueEscapeInConstructor.
  private static volatile Thread createQueuesThread;
  // Per-VM cache instance (static fields are per DUnit VM, not shared).
  private static InternalCache cache = null;
  // Per-VM HARegionQueue under test.
  private static HARegionQueue hrq = null;
  // Worker threads performing concurrent put/peek/take operations in each VM.
  private static Thread[] opThreads;
  private VM vm0 = null;
  private VM vm1 = null;
  private VM vm3 = null;
  private VM vm2 = null;
/**
* get the VM's
*/
@Override
public final void postSetUp() throws Exception {
final Host host = Host.getHost(0);
vm0 = host.getVM(0);
vm1 = host.getVM(1);
vm2 = host.getVM(2);
vm3 = host.getVM(3);
vm0.invoke(() -> HARegionQueueDUnitTest.toCnt = true);
vm1.invoke(() -> HARegionQueueDUnitTest.toCnt = true);
vm2.invoke(() -> HARegionQueueDUnitTest.toCnt = true);
vm3.invoke(() -> HARegionQueueDUnitTest.toCnt = true);
}
/**
* close the cache in tearDown
*/
@Override
public final void preTearDown() throws Exception {
vm0.invoke(HARegionQueueDUnitTest::closeCache);
vm1.invoke(HARegionQueueDUnitTest::closeCache);
vm2.invoke(HARegionQueueDUnitTest::closeCache);
vm3.invoke(HARegionQueueDUnitTest::closeCache);
cache = null;
hrq = null;
opThreads = null;
}
/**
* create cache
*/
private InternalCache createCache() throws CacheException {
Properties props = new Properties();
DistributedSystem ds = getSystem(props);
ds.disconnect();
ds = getSystem(props);
InternalCache cache = null;
cache = (InternalCache) CacheFactory.create(ds);
if (cache == null) {
// TODO: never throw an anonymous inner class
throw new CacheException("CacheFactory.create() returned null ") {};
}
return cache;
}
/**
* 1) Create mirrored HARegion region1 in VM1 and VM2 2) do a put in VM1 3) assert that the put
* has not propagated from VM1 to VM2 4) do a put in VM2 5) assert that the value in VM1 has not
* changed to due to put in VM2 6) assert put in VM2 was successful by doing a get
*/
@Test
public void testLocalPut() throws Exception {
vm0.invoke(HARegionQueueDUnitTest::createRegion);
vm1.invoke(HARegionQueueDUnitTest::createRegion);
vm0.invoke(HARegionQueueDUnitTest::putValue1);
vm1.invoke(HARegionQueueDUnitTest::getNull);
vm1.invoke(HARegionQueueDUnitTest::putValue2);
vm0.invoke(HARegionQueueDUnitTest::getValue1);
vm1.invoke(HARegionQueueDUnitTest::getValue2);
}
/**
* 1) Create mirrored HARegion region1 in VM1 and VM2 2) do a put in VM1 3) assert that the put
* has not propagated from VM1 to VM2 4) do a put in VM2 5) assert that the value in VM1 has not
* changed to due to put in VM2 6) assert respective puts the VMs were successful by doing a get
* 7) localDestroy key in VM1 8) assert key has been destroyed in VM1 9) assert key has not been
* destroyed in VM2
*/
@Test
public void testLocalDestroy() throws Exception {
vm0.invoke(HARegionQueueDUnitTest::createRegion);
vm1.invoke(HARegionQueueDUnitTest::createRegion);
vm0.invoke(HARegionQueueDUnitTest::putValue1);
vm1.invoke(HARegionQueueDUnitTest::getNull);
vm1.invoke(HARegionQueueDUnitTest::putValue2);
vm0.invoke(HARegionQueueDUnitTest::getValue1);
vm1.invoke(HARegionQueueDUnitTest::getValue2);
vm0.invoke(HARegionQueueDUnitTest::destroy);
vm0.invoke(HARegionQueueDUnitTest::getNull);
vm1.invoke(HARegionQueueDUnitTest::getValue2);
}
/**
* 1) Create mirrored HARegion region1 in VM1 2) do a put in VM1 3) get the value in VM1 to assert
* put has happened successfully 4) Create mirrored HARegion region1 in VM2 5) do a get in VM2 to
* verify that value was got through GII 6) do a put in VM2 7) assert put in VM2 was successful
*/
@Test
public void testGII() throws Exception {
vm0.invoke(HARegionQueueDUnitTest::createRegion);
vm0.invoke(HARegionQueueDUnitTest::putValue1);
vm0.invoke(HARegionQueueDUnitTest::getValue1);
vm1.invoke(HARegionQueueDUnitTest::createRegion);
vm1.invoke(HARegionQueueDUnitTest::getValue1);
vm1.invoke(HARegionQueueDUnitTest::putValue2);
vm1.invoke(HARegionQueueDUnitTest::getValue2);
}
/**
* 1) Create mirrored HARegion region1 in VM1 2) do a put in VM1 3) get the value in VM1 to assert
* put has happened successfully 4) Create mirrored HARegion region1 in VM2 5) do a get in VM2 to
* verify that value was got through GII 6) do a put in VM2 7) assert put in VM2 was successful
*/
@Test
public void testQRM() throws Exception {
vm0.invoke(HARegionQueueDUnitTest::createRegionQueue);
vm1.invoke(HARegionQueueDUnitTest::createRegionQueue);
vm0.invoke(HARegionQueueDUnitTest::verifyAddingDispatchMesgs);
vm1.invoke(HARegionQueueDUnitTest::verifyDispatchedMessagesRemoved);
}
  /**
   * Behaviour of take() has been changed for reliable messaging feature. Region queue take()
   * operation will no longer add to the Dispatch Message Map. Hence disabling the test - SUYOG
   *
   * Test for #35988 HARegionQueue.take() is not functioning as expected
   */
  @Ignore("TODO: this test was disabled")
  @Test
  public void testBugNo35988() throws Exception {
    // Build the same queue in vm0 and vm1 and put one conflatable entry in each.
    CacheSerializableRunnable createQueue =
        new CacheSerializableRunnable("CreateCache, HARegionQueue and start thread") {
          @Override
          public void run2() throws CacheException {
            HARegionQueueDUnitTest test = new HARegionQueueDUnitTest();
            // TODO:ASIF: Bcoz of the QRM thread cannot take frequency below
            // 1 second , thus we need to carfully evaluate what to do. Though
            // in this case 1 second instead of 500 ms will work
            // System.getProperties().put("QueueRemovalThreadWaitTime", new Long(500));
            cache = test.createCache();
            cache.setMessageSyncInterval(1);
            HARegionQueueAttributes hrqa = new HARegionQueueAttributes();
            hrqa.setExpiryTime(300);
            try {
              hrq = HARegionQueue.getHARegionQueueInstance("testregion1", cache, hrqa,
                  HARegionQueue.NON_BLOCKING_HA_QUEUE, false, disabledClock());
              // Do 1000 putand 100 take in a separate thread
              hrq.put(new ConflatableObject(1L, 1L,
                  new EventID(new byte[] {0}, 1, 1), false, "dummy"));
            } catch (Exception e) {
              throw new AssertionError(e);
            }
          }
        };
    vm0.invoke(createQueue);
    vm1.invoke(createQueue);
    // take() in vm0 removes the entry there ...
    vm0.invoke(new CacheSerializableRunnable("takeFromVm0") {
      @Override
      public void run2() throws CacheException {
        try {
          Conflatable obj = (Conflatable) hrq.take();
          assertNotNull(obj);
        } catch (Exception e) {
          throw new AssertionError(e);
        }
      }
    });
    // ... and vm1's copy of the queue is expected to drain to empty as well.
    vm1.invoke(new CacheSerializableRunnable("checkInVm1") {
      @Override
      public void run2() throws CacheException {
        WaitCriterion ev = new WaitCriterion() {
          @Override
          public boolean done() {
            Thread.yield(); // TODO is this necessary?
            return hrq.size() == 0;
          }
          @Override
          public String description() {
            return null;
          }
        };
        GeodeAwaitility.await().untilAsserted(ev);
      }
    });
  }
/**
* create a client with 2 regions sharing a common writer
*/
private static void createRegion() throws Exception {
HARegionQueueDUnitTest test = new HARegionQueueDUnitTest();
cache = test.createCache();
AttributesFactory factory = new AttributesFactory();
factory.setScope(Scope.DISTRIBUTED_ACK);
factory.setDataPolicy(DataPolicy.REPLICATE);
// Mock the HARegionQueue and answer the input CachedDeserializable when updateHAEventWrapper is
// called
HARegionQueue harq = mock(HARegionQueue.class);
when(harq.updateHAEventWrapper(any(), any(), any()))
.thenAnswer(AdditionalAnswers.returnsSecondArg());
HARegion.getInstance("HARegionQueueDUnitTest_region", cache, harq,
factory.create(), disabledClock());
}
private static void createRegionQueue() throws Exception {
HARegionQueueDUnitTest test = new HARegionQueueDUnitTest();
cache = test.createCache();
/*
* AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.DISTRIBUTED_ACK);
* factory.setDataPolicy(DataPolicy.REPLICATE);
*/
hrq = HARegionQueue.getHARegionQueueInstance("HARegionQueueDUnitTest_region", cache,
HARegionQueue.NON_BLOCKING_HA_QUEUE, false, disabledClock());
EventID id1 = new EventID(new byte[] {1}, 1, 1);
EventID id2 = new EventID(new byte[] {1}, 1, 2);
ConflatableObject c1 =
new ConflatableObject("1", "1", id1, false, "HARegionQueueDUnitTest_region");
ConflatableObject c2 =
new ConflatableObject("2", "2", id2, false, "HARegionQueueDUnitTest_region");
hrq.put(c1);
hrq.put(c2);
}
private static void createRegionQueue2() throws Exception {
HARegionQueueDUnitTest test = new HARegionQueueDUnitTest();
cache = test.createCache();
/*
* AttributesFactory factory = new AttributesFactory(); factory.setScope(Scope.DISTRIBUTED_ACK);
* factory.setDataPolicy(DataPolicy.REPLICATE);
*/
HARegionQueueAttributes harqAttr = new HARegionQueueAttributes();
harqAttr.setExpiryTime(3);
hrq = HARegionQueue.getHARegionQueueInstance("HARegionQueueDUnitTest_region", cache, harqAttr,
HARegionQueue.NON_BLOCKING_HA_QUEUE, false, disabledClock());
}
private static void clearRegion() {
try {
for (final Object o : hrq.getRegion().keys()) {
hrq.getRegion().localDestroy(o);
}
} catch (Exception e) {
fail("Exception occurred while trying to destroy region", e);
}
}
private static void verifyAddingDispatchMesgs() {
assertTrue(HARegionQueue.getDispatchedMessagesMapForTesting().isEmpty());
hrq.addDispatchedMessage(new ThreadIdentifier(new byte[1], 1), 1);
assertTrue(!HARegionQueue.getDispatchedMessagesMapForTesting().isEmpty());
}
  /**
   * Waits until the entry keyed 0L disappears from the queue's backing region
   * (removed by the queue-removal mechanism), then asserts that the entry keyed
   * 1L is still present.
   */
  private static void verifyDispatchedMessagesRemoved() {
    try {
      final Region region = hrq.getRegion();
      // wait until we have a dead server
      WaitCriterion ev = new WaitCriterion() {
        @Override
        public boolean done() {
          Thread.yield(); // TODO is this necessary?
          return region.get(0L) == null;
        }
        @Override
        public String description() {
          return null;
        }
      };
      GeodeAwaitility.await().untilAsserted(ev);
      /*
       * if (region.get(new Long(0)) != null) { fail("Expected message to have been deleted but it
       * is not deleted"); }
       */
      // The second entry must NOT have been removed.
      if (region.get(1L) == null) {
        fail("Expected message not to have been deleted but it is deleted");
      }
    } catch (Exception e) {
      fail("test failed due to an exception", e);
    }
  }
/**
* close the cache
*/
private static void closeCache() {
if (cache != null && !cache.isClosed()) {
cache.close();
cache.getDistributedSystem().disconnect();
}
}
/**
* do puts on key-1
*/
private static void putValue1() {
try {
Region r1 = cache.getRegion(SEPARATOR + "HARegionQueueDUnitTest_region");
r1.put("key-1", "value-1");
} catch (Exception ex) {
fail("failed while region.put()", ex);
}
}
private static void putConflatables() {
try {
Region r1 = hrq.getRegion();
for (int i = 1; i < 11; i++) {
r1.put((long) i, new ConflatableObject("key" + i, "value" + i,
new EventID(new byte[] {1}, 1, i), true, "HARegionQueueDUnitTest_region"));
}
} catch (Exception ex) {
fail("failed while region.put()", ex);
}
}
  /**
   * Verifies the queue's region data and internal maps were populated correctly
   * after GII, then sleeps past the expiry interval and verifies everything
   * (region entries, conflation map, events map, counter set) has emptied out.
   */
  private static void verifyMapsAndData() {
    try {
      HARegion r1 = hrq.getRegion();
      // region should not be null
      assertNotNull(" Did not expect the HARegion to be null but it is", r1);
      // it should have ten non null entries
      for (int i = 1; i < 11; i++) {
        assertNotNull(" Did not expect the entry to be null but it is", r1.get((long) i));
      }
      // HARegionQueue should not be null
      assertNotNull(" Did not expect the HARegionQueue to be null but it is", hrq);
      Map conflationMap = hrq.getConflationMapForTesting();
      // conflationMap size should be greater than 0
      assertTrue(" Did not expect the conflationMap size to be 0 but it is",
          conflationMap.size() > 0);
      Map internalMap = (Map) conflationMap.get("HARegionQueueDUnitTest_region");
      // internal map should not be null. it should be present
      assertNotNull(" Did not expect the internalMap to be null but it is", internalMap);
      // get and verify the entries in the conflation map.
      for (int i = 1; i < 11; i++) {
        assertTrue(" Did not expect the entry not to be equal but it is",
            internalMap.get("key" + i).equals((long) i));
      }
      Map eventMap = hrq.getEventsMapForTesting();
      // DACE should not be null
      assertNotNull(" Did not expect the result (DACE object) to be null but it is",
          eventMap.get(new ThreadIdentifier(new byte[] {1}, 1)));
      Set counterSet = hrq.getCurrentCounterSet(new EventID(new byte[] {1}, 1, 1));
      assertTrue(" excpected the counter set size to be 10 but it is not so",
          counterSet.size() == 10);
      long i = 1;
      // verify the order of the iteration. it should be 1 - 10. The underlying
      // set is a LinkedHashSet
      for (final Object o : counterSet) {
        assertTrue((Long) o == i);
        i++;
      }
      // The last dispactchde sequence Id should be -1 since no dispatch has
      // been made
      assertTrue(hrq.getLastDispatchedSequenceId(new EventID(new byte[] {1}, 1, 1)) == -1);
      // sleep for 8.0 seconds. Everythign should expire and everything should
      // be null and empty
      // NOTE(review): relies on a fixed sleep longer than the queue's expiry; flaky on slow hosts.
      Thread.sleep(7500);
      for (int j = 1; j < 11; j++) {
        assertNull("expected the entry to be null since expiry time exceeded but it is not so",
            r1.get((long) j));
      }
      internalMap = (Map) hrq.getConflationMapForTesting().get("HARegionQueueDUnitTest_region");
      assertNotNull(" Did not expect the internalMap to be null but it is", internalMap);
      assertTrue(
          "internalMap (conflation) should have been emptry since expiry of all entries has been exceeded but it is not so",
          internalMap.isEmpty());
      assertTrue(
          "eventMap should have been emptry since expiry of all entries has been exceeded but it is not so",
          eventMap.isEmpty());
      assertTrue(
          "counter set should have been emptry since expiry of all entries has been exceeded but it is not so",
          counterSet.isEmpty());
    } catch (Exception ex) {
      fail("failed while region.put()", ex);
    }
  }
/**
* do puts on key-1,value-2
*/
private static void putValue2() {
try {
Region r1 = cache.getRegion(SEPARATOR + "HARegionQueueDUnitTest_region");
r1.put("key-1", "value-2");
} catch (Exception ex) {
fail("failed while region.put()", ex);
}
}
/**
* do a get on region1
*/
private static void getValue1() {
try {
Region r = cache.getRegion(SEPARATOR + "HARegionQueueDUnitTest_region");
if (!(r.get("key-1").equals("value-1"))) {
fail("expected value to be value-1 but it is not so");
}
} catch (Exception ex) {
fail("failed while region.get()", ex);
}
}
/**
* do a get on region1
*/
private static void getNull() {
try {
Region r = cache.getRegion(SEPARATOR + "HARegionQueueDUnitTest_region");
if (!(r.get("key-1") == (null))) {
fail("expected value to be null but it is not so");
}
} catch (Exception ex) {
fail("failed while region.get()", ex);
}
}
/**
* do a get on region1
*/
public static void getValue2() {
try {
Region r = cache.getRegion(SEPARATOR + "HARegionQueueDUnitTest_region");
if (!(r.get("key-1").equals("value-2"))) {
fail("expected value to be value-2 but it is not so");
}
} catch (Exception ex) {
fail("failed while region.get()", ex);
}
}
/**
* destroy key-1
*/
public static void destroy() {
try {
Region region1 = cache.getRegion(SEPARATOR + "HARegionQueueDUnitTest_region");
region1.localDestroy("key-1");
} catch (Exception e) {
fail("test failed due to exception in destroy", e);
}
}
  /**
   * Tests the non-blocking HARegionQueue by doing concurrent put / remove / take / peek and
   * batch-peek operations in multiple regions, with take/remove occurring in all the VMs.
   * Targeted at detecting hangs or exceptions in the non-blocking queue.
   */
  @Test
  public void testConcurrentOperationsDunitTestOnNonBlockingQueue() throws Exception {
    concurrentOperationsDunitTest(false, Scope.DISTRIBUTED_ACK);
  }
  /**
   * Same as {@code testConcurrentOperationsDunitTestOnNonBlockingQueue} but with a
   * DISTRIBUTED_NO_ACK region feeding the non-blocking queue.
   */
  @Test
  public void testConcurrentOperationsDunitTestOnNonBlockingQueueWithDNoAckRegion()
      throws Exception {
    concurrentOperationsDunitTest(false, Scope.DISTRIBUTED_NO_ACK);
  }
  /**
   * Tests the blocking HARegionQueue by doing concurrent put / remove / take / peek and
   * batch-peek operations in multiple regions, with take/remove occurring in all the VMs.
   * Targeted at detecting hangs or exceptions in the blocking queue.
   */
  @Test
  public void testConcurrentOperationsDunitTestOnBlockingQueue() throws Exception {
    concurrentOperationsDunitTest(true, Scope.DISTRIBUTED_ACK);
  }
  /**
   * Drives concurrent put/peek/take traffic against a HARegionQueue in all four
   * VMs: each VM creates a cache, a replicated region whose listener feeds
   * events into the queue, spawns ten worker threads, waits until every worker
   * has performed at least one operation, signals shutdown via {@code toCnt},
   * and joins the workers.
   *
   * @param createBlockingQueue whether to create a BLOCKING_HA_QUEUE (else non-blocking)
   * @param rscope scope of the feeding region (DISTRIBUTED_ACK or DISTRIBUTED_NO_ACK)
   */
  private void concurrentOperationsDunitTest(final boolean createBlockingQueue,
      final Scope rscope) {
    // Create Cache and HARegionQueue in all the 4 VMs.
    CacheSerializableRunnable createRgnsAndQueues = new CacheSerializableRunnable(
        "CreateCache, mirrored Region & HARegionQueue with a CacheListener") {
      @Override
      public void run2() throws CacheException {
        HARegionQueueDUnitTest test = new HARegionQueueDUnitTest();
        System.getProperties().put("QueueRemovalThreadWaitTime", "2000");
        cache = test.createCache();
        AttributesFactory factory = new AttributesFactory();
        factory.setScope(rscope);
        factory.setDataPolicy(DataPolicy.REPLICATE);
        HARegionQueueAttributes hrqa = new HARegionQueueAttributes();
        hrqa.setExpiryTime(5);
        try {
          if (createBlockingQueue) {
            hrq = HARegionQueue.getHARegionQueueInstance("testregion1", cache, hrqa,
                HARegionQueue.BLOCKING_HA_QUEUE, false, disabledClock());
          } else {
            hrq = HARegionQueue.getHARegionQueueInstance("testregion1", cache, hrqa,
                HARegionQueue.NON_BLOCKING_HA_QUEUE, false, disabledClock());
          }
        } catch (Exception e) {
          throw new AssertionError(e);
        }
        // While toCnt is true, every region create/update is mirrored into the queue.
        factory.addCacheListener(new CacheListenerAdapter() {
          @Override
          public void afterCreate(final EntryEvent event) {
            if (toCnt) {
              Conflatable conflatable = new ConflatableObject(event.getKey(), event.getNewValue(),
                  ((EntryEventImpl) event).getEventId(), false, event.getRegion().getFullPath());
              try {
                hrq.put(conflatable);
              } catch (Exception e) {
                fail("The put operation in queue did not succeed due to exception =", e);
              }
            }
          }
          @Override
          public void afterUpdate(final EntryEvent event) {
            if (toCnt) {
              Conflatable conflatable = new ConflatableObject(event.getKey(), event.getNewValue(),
                  ((EntryEventImpl) event).getEventId(), true, event.getRegion().getFullPath());
              try {
                hrq.put(conflatable);
              } catch (Exception e) {
                fail("The put operation in queue did not succeed due to exception =", e);
              }
            }
          }
        });
        cache.createRegion("test_region", factory.create());
      }
    };
    vm0.invoke(createRgnsAndQueues);
    vm1.invoke(createRgnsAndQueues);
    vm2.invoke(createRgnsAndQueues);
    vm3.invoke(createRgnsAndQueues);
    CacheSerializableRunnable spawnThreadsAndperformOps =
        new CacheSerializableRunnable("Spawn multiple threads which do various operations") {
          @Override
          public void run2() throws CacheException {
            // 4 put threads, 2 peek threads, 4 take threads (indices 6-9).
            // NOTE(review): the last two loops both create TAKE ops; the 4+2+2+2
            // sizing suggests a different op may have been intended — confirm.
            opThreads = new RunOp[4 + 2 + 2 + 2];
            for (int i = 0; i < 4; ++i) {
              opThreads[i] = new RunOp(RunOp.PUT, i);
            }
            for (int i = 4; i < 6; ++i) {
              opThreads[i] = new RunOp(RunOp.PEEK, i);
            }
            for (int i = 6; i < 8; ++i) {
              opThreads[i] = new RunOp(RunOp.TAKE, i);
            }
            for (int i = 8; i < 10; ++i) {
              opThreads[i] = new RunOp(RunOp.TAKE, i);
            }
            for (final Thread opThread : opThreads) {
              opThread.start();
            }
          }
        };
    vm0.invokeAsync(spawnThreadsAndperformOps);
    vm1.invokeAsync(spawnThreadsAndperformOps);
    vm2.invokeAsync(spawnThreadsAndperformOps);
    vm3.invokeAsync(spawnThreadsAndperformOps);
    // True only once every worker thread in the VM has performed >= 1 operation.
    SerializableCallable guaranteeOperationsOccured =
        new SerializableCallable("Check Ops Occurred") {
          @Override
          public Object call() throws CacheException {
            if (opThreads == null) {
              return false;
            }
            for (final Thread opThread : opThreads) {
              if (((RunOp) opThread).getNumOpsPerformed() == 0) {
                return false;
              }
            }
            return true;
          }
        };
    await()
        .untilAsserted(() -> assertTrue((Boolean) vm0.invoke(guaranteeOperationsOccured)));
    await()
        .untilAsserted(() -> assertTrue((Boolean) vm1.invoke(guaranteeOperationsOccured)));
    await()
        .untilAsserted(() -> assertTrue((Boolean) vm2.invoke(guaranteeOperationsOccured)));
    await()
        .untilAsserted(() -> assertTrue((Boolean) vm3.invoke(guaranteeOperationsOccured)));
    // In case of blocking HARegionQueue do some extra puts so that the
    // blocking threads
    // are exited
    CacheSerializableRunnable toggleFlag =
        new CacheSerializableRunnable("Toggle the flag to signal end of threads") {
          @Override
          public void run2() throws CacheException {
            toCnt = false;
            if (createBlockingQueue) {
              try {
                for (int i = 0; i < 100; ++i) {
                  hrq.put(new ConflatableObject("1", "1", new EventID(new byte[] {1}, 100, i),
                      false, SEPARATOR + "x"));
                }
              } catch (Exception e) {
                throw new AssertionError(e);
              }
            }
          }
        };
    vm0.invokeAsync(toggleFlag);
    vm1.invokeAsync(toggleFlag);
    vm2.invokeAsync(toggleFlag);
    vm3.invokeAsync(toggleFlag);
    CacheSerializableRunnable joinWithThreads =
        new CacheSerializableRunnable("Join with the threads") {
          @Override
          public void run2() throws CacheException {
            for (final Thread opThread : opThreads) {
              if (opThread.isInterrupted()) {
                fail("Test failed because thread encountered exception");
              }
              ThreadUtils.join(opThread, 30 * 1000);
            }
          }
        };
    vm0.invoke(joinWithThreads);
    vm1.invoke(joinWithThreads);
    vm2.invoke(joinWithThreads);
    vm3.invoke(joinWithThreads);
    System.getProperties().remove("QueueRemovalThreadWaitTime");
  }
/**
 * Tests the bug caused when an HARegionQueue object has not been fully constructed
 * but, because its backing HARegion has already been created, the queue becomes
 * visible to the QRM message thread — which may then observe partially-initialized
 * state and throw a NullPointerException.
 *
 * TODO: this test runs too long! Shorten run time. 1m 40s on new Mac.
 */
@Test
public void testNPEDueToHARegionQueueEscapeInConstructor() {
    // changing EXPIRY_TIME to 5 doesn't change how long the test runs!
    final int EXPIRY_TIME = 30; // test will run for this many seconds
    // Create two HARegionQueue 's in the two VMs. The frequency of QRM thread
    // should be high
    // Check for NullPointeException in the other VM.
    CacheSerializableRunnable createQueuesAndThread =
        new CacheSerializableRunnable("CreateCache, HARegionQueue and start thread") {
            @Override
            public void run2() throws CacheException {
                HARegionQueueDUnitTest test = new HARegionQueueDUnitTest();
                // TODO:ASIF: Bcoz of the QRM thread cannot take frequency below
                // 1 second , thus we need to carfully evaluate what to do.
                // For this bug to appear ,without bugfix , qrm needs to run
                // very fast.
                // System.getProperties().put("QueueRemovalThreadWaitTime", new Long(10));
                cache = test.createCache();
                // message-sync interval of 1s keeps the QRM thread running frequently
                cache.setMessageSyncInterval(1);
                HARegionQueueAttributes hrqa = new HARegionQueueAttributes();
                hrqa.setExpiryTime(EXPIRY_TIME);
                try {
                    hrq = HARegionQueue.getHARegionQueueInstance(
                        "testNPEDueToHARegionQueueEscapeInConstructor", cache, hrqa,
                        HARegionQueue.NON_BLOCKING_HA_QUEUE, false, disabledClock());
                    // changing OP_COUNT to 20 makes no difference in test time
                    final int OP_COUNT = 200;
                    // Do 1000 putand 100 take in a separate thread
                    for (int i = 0; i < OP_COUNT; ++i) {
                        hrq.put(new ConflatableObject((long) i, (long) i,
                            new EventID(new byte[] {0}, 1, i), false, "dummy"));
                    }
                    // Drain the queue from a background thread while the other VM
                    // constructs its own queue (the construction race under test).
                    opThreads = new Thread[1];
                    opThreads[0] = new Thread(() -> {
                        for (int i = 0; i < OP_COUNT; ++i) {
                            try {
                                Object o = hrq.take();
                                if (o == null) {
                                    // queue momentarily empty: back off briefly
                                    Thread.sleep(50);
                                }
                            } catch (InterruptedException e) {
                                throw new AssertionError(e);
                            }
                        }
                    });
                    opThreads[0].start();
                } catch (Exception e) {
                    throw new AssertionError(e);
                }
            }
        };
    CacheSerializableRunnable createQueues =
        new CacheSerializableRunnable("CreateCache, HARegionQueue ") {
            @Override
            public void run2() throws CacheException {
                // record this thread so the driver can join with it later
                createQueuesThread = Thread.currentThread();
                HARegionQueueDUnitTest test = new HARegionQueueDUnitTest();
                // System.getProperties().put("QueueRemovalThreadWaitTime",
                // new Long(120000));
                cache = test.createCache();
                cache.setMessageSyncInterval(EXPIRY_TIME);
                HARegionQueueAttributes hrqa = new HARegionQueueAttributes();
                hrqa.setExpiryTime(EXPIRY_TIME);
                try {
                    hrq = HARegionQueue.getHARegionQueueInstance(
                        "testNPEDueToHARegionQueueEscapeInConstructor", cache, hrqa,
                        HARegionQueue.NON_BLOCKING_HA_QUEUE, false, disabledClock());
                } catch (Exception e) {
                    throw new AssertionError(e);
                }
            }
        };
    CacheSerializableRunnable waitForCreateQueuesThread =
        new CacheSerializableRunnable("joinCreateCache") {
            @Override
            public void run2() {
                // wait until createQueues has started (and published its thread),
                // then join with it
                WaitCriterion ev = new WaitCriterion() {
                    @Override
                    public boolean done() {
                        return createQueuesThread != null;
                    }

                    @Override
                    public String description() {
                        return null;
                    }
                };
                GeodeAwaitility.await().untilAsserted(ev);
                join(createQueuesThread, 300 * 1000);
            }
        };
    // vm0 builds a queue and starts draining it; vm1 constructs its queue
    // concurrently — the scenario in which the escape/NPE bug manifested.
    vm0.invoke(createQueuesAndThread);
    vm1.invokeAsync(createQueues);
    CacheSerializableRunnable joinWithThread =
        new CacheSerializableRunnable("CreateCache, HARegionQueue join with thread") {
            @Override
            public void run2() throws CacheException {
                if (opThreads[0].isInterrupted()) {
                    fail("The test has failed as it encountered interrupts in puts & takes");
                }
                ThreadUtils.join(opThreads[0], 30 * 1000);
            }
        };
    vm0.invoke(joinWithThread);
    vm1.invoke(waitForCreateQueuesThread);
}
/**
 * Worker thread that repeatedly performs a single kind of queue/region operation
 * (put, take, peek, or batch peek) until the shared {@code toCnt} flag is cleared.
 */
private static class RunOp extends Thread {
    private static final int PUT = 1;
    private static final int TAKE = 2;
    private static final int PEEK = 3;
    private static final int BATCH_PEEK = 4;
    // Which of the four operations this worker performs.
    private final int opType;
    // Identifier used to build this worker's region key.
    private final int threadID;
    // Completed-operation count, read by the driver after shutdown.
    private int numOpsPerformed = 0;

    public RunOp(int opType, int id) {
        super("ID=" + id + ",Op=" + opType);
        this.opType = opType;
        threadID = id;
    }

    public int getNumOpsPerformed() {
        return numOpsPerformed;
    }

    @Override
    public void run() {
        Region region = cache.getRegion("test_region");
        LogWriter log = cache.getLogger();
        int nextValue = 0;
        try {
            while (toCnt) {
                if (opType == PUT) {
                    // cycle values 0..9 under this worker's key
                    region.put("key" + threadID, "val" + nextValue++);
                    if (nextValue == 10) {
                        nextValue = 0;
                    }
                } else if (opType == TAKE) {
                    Conflatable taken = (Conflatable) hrq.take();
                    if (log.fineEnabled() && taken != null) {
                        log.fine("Object retrieved by take has key =" + taken.getKeyToConflate()
                            + " and value as" + taken.getValueToConflate());
                    }
                } else if (opType == PEEK) {
                    Conflatable peeked = (Conflatable) hrq.peek();
                    if (log.fineEnabled() && peeked != null) {
                        log.fine("Object retrieved by peek has key =" + peeked.getKeyToConflate()
                            + " and value as" + peeked.getValueToConflate());
                    }
                    // a peek must be followed by remove to advance the queue
                    hrq.remove();
                } else if (opType == BATCH_PEEK) {
                    List batch = hrq.peek(3, 2000);
                    if (log.fineEnabled() && batch != null) {
                        log.fine("Object retrieved by batch peek are =" + batch);
                    }
                    hrq.remove();
                }
                numOpsPerformed++;
            }
        } catch (Exception e) {
            throw new AssertionError(e);
        }
    }
}
/**
 * Checks the data received by GII, only gets expired after proper construction of HARegionQueue
 * object.
 */
private static void createHARegionQueueandCheckExpiration() throws Exception {
    HARegionQueueDUnitTest instance = new HARegionQueueDUnitTest();
    cache = instance.createCache();
    HARegionQueueAttributes queueAttributes = new HARegionQueueAttributes();
    queueAttributes.setExpiryTime(1);
    hrq = getHARegionQueueInstance("HARegionQueueDUnitTest_region", cache, queueAttributes,
        NON_BLOCKING_HA_QUEUE, false, disabledClock());
    // Block until expiry has drained every available id from the queue.
    WaitCriterion allIdsExpired = new WaitCriterion() {
        @Override
        public boolean done() {
            return hrq.getAvailableIds().size() == 0;
        }

        @Override
        public String description() {
            return null;
        }
    };
    GeodeAwaitility.await().untilAsserted(allIdsExpired);
}
@Test
public void testForDuplicateEvents() throws Exception {
    // vm0 seeds a queue via createRegionQueue (defined elsewhere in this file);
    // vm1 then creates its own queue — presumably receiving vm0's events via GII,
    // TODO confirm — and verifies that duplicate EventIDs are rejected.
    vm0.invoke(HARegionQueueDUnitTest::createRegionQueue);
    vm1.invoke(HARegionQueueDUnitTest::createRegionQueueandCheckDuplicates);
}
/**
 * HARegionQueue should not allow data with duplicate EventIds.
 */
private static void createRegionQueueandCheckDuplicates() throws Exception {
    HARegionQueueDUnitTest test = new HARegionQueueDUnitTest();
    cache = test.createCache();
    hrq = HARegionQueue.getHARegionQueueInstance("HARegionQueueDUnitTest_region", cache,
        HARegionQueue.NON_BLOCKING_HA_QUEUE, false, disabledClock());
    // The queue is expected to already hold 2 events here — presumably obtained
    // from the queue created earlier in the other VM (see testForDuplicateEvents);
    // TODO confirm against the caller.
    assertEquals(2, hrq.size());
    EventID id1 = new EventID(new byte[] {1}, 1, 1);
    EventID id2 = new EventID(new byte[] {1}, 1, 2);
    // Re-put events carrying EventIDs already present in the queue.
    ConflatableObject c1 =
        new ConflatableObject("1", "1", id1, false, "HARegionQueueDUnitTest_region");
    ConflatableObject c2 =
        new ConflatableObject("2", "2", id2, false, "HARegionQueueDUnitTest_region");
    hrq.put(c1);
    hrq.put(c2);
    // HARegion size should be 2 as data with same EventIDs is inserted into the queue
    assertEquals(2, hrq.size());
}
}
|
apache/maven | 37,364 | compat/maven-compat/src/main/java/org/apache/maven/repository/legacy/resolver/DefaultLegacyArtifactCollector.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
*/
package org.apache.maven.repository.legacy.resolver;
import javax.inject.Inject;
import javax.inject.Named;
import javax.inject.Singleton;
import java.util.ArrayList;
import java.util.Collections;
import java.util.Iterator;
import java.util.LinkedHashMap;
import java.util.LinkedHashSet;
import java.util.List;
import java.util.Map;
import java.util.Set;
import org.apache.maven.artifact.Artifact;
import org.apache.maven.artifact.metadata.ArtifactMetadataSource;
import org.apache.maven.artifact.metadata.ResolutionGroup;
import org.apache.maven.artifact.repository.ArtifactRepository;
import org.apache.maven.artifact.resolver.ArtifactResolutionException;
import org.apache.maven.artifact.resolver.ArtifactResolutionRequest;
import org.apache.maven.artifact.resolver.ArtifactResolutionResult;
import org.apache.maven.artifact.resolver.CyclicDependencyException;
import org.apache.maven.artifact.resolver.ResolutionListener;
import org.apache.maven.artifact.resolver.ResolutionListenerForDepMgmt;
import org.apache.maven.artifact.resolver.ResolutionNode;
import org.apache.maven.artifact.resolver.filter.AndArtifactFilter;
import org.apache.maven.artifact.resolver.filter.ArtifactFilter;
import org.apache.maven.artifact.versioning.ArtifactVersion;
import org.apache.maven.artifact.versioning.ManagedVersionMap;
import org.apache.maven.artifact.versioning.OverConstrainedVersionException;
import org.apache.maven.artifact.versioning.VersionRange;
import org.apache.maven.execution.MavenSession;
import org.apache.maven.plugin.LegacySupport;
import org.apache.maven.repository.legacy.metadata.ArtifactMetadataRetrievalException;
import org.apache.maven.repository.legacy.metadata.DefaultMetadataResolutionRequest;
import org.apache.maven.repository.legacy.metadata.MetadataResolutionRequest;
import org.apache.maven.repository.legacy.resolver.conflict.ConflictResolver;
import org.codehaus.plexus.logging.Logger;
/**
 * Default implementation of the legacy (Maven 2.x) artifact collector: walks the
 * dependency graph recursively, applies dependency management, mediates version
 * ranges between duplicate artifacts, and resolves conflicts through the configured
 * {@link ConflictResolver} chain, firing {@link ResolutionListener} events along the way.
 */
@Named
@Singleton
@Deprecated
public class DefaultLegacyArtifactCollector implements LegacyArtifactCollector {

    @Inject
    @Named("nearest")
    private ConflictResolver defaultConflictResolver;

    @Inject
    private Logger logger;

    @Inject
    private LegacySupport legacySupport;

    /**
     * Copies offline/force-update/transport settings from the current session, if any,
     * into the given resolution request.
     */
    private void injectSession(ArtifactResolutionRequest request) {
        MavenSession session = legacySupport.getSession();

        if (session != null) {
            request.setOffline(session.isOffline());
            request.setForceUpdate(session.getRequest().isUpdateSnapshots());
            request.setServers(session.getRequest().getServers());
            request.setMirrors(session.getRequest().getMirrors());
            request.setProxies(session.getRequest().getProxies());
        }
    }

    /**
     * Collects dependencies using explicit repositories; builds an
     * {@link ArtifactResolutionRequest} (seeded from the current session) and delegates
     * to the request-based overload.
     */
    @Override
    @SuppressWarnings("checkstyle:parameternumber")
    public ArtifactResolutionResult collect(
            Set<Artifact> artifacts,
            Artifact originatingArtifact,
            Map<String, Artifact> managedVersions,
            ArtifactRepository localRepository,
            List<ArtifactRepository> remoteRepositories,
            ArtifactMetadataSource source,
            ArtifactFilter filter,
            List<ResolutionListener> listeners,
            List<ConflictResolver> conflictResolvers) {
        ArtifactResolutionRequest request = new ArtifactResolutionRequest();
        request.setLocalRepository(localRepository);
        request.setRemoteRepositories(remoteRepositories);
        injectSession(request);
        return collect(
                artifacts, originatingArtifact, managedVersions, request, source, filter, listeners, conflictResolvers);
    }

    /**
     * Main collection entry point: builds the resolution tree rooted at
     * {@code originatingArtifact}, recurses through transitive dependencies, and
     * gathers the surviving (active, filter-passing) nodes into the result.
     */
    @Override
    @SuppressWarnings("checkstyle:parameternumber")
    public ArtifactResolutionResult collect(
            Set<Artifact> artifacts,
            Artifact originatingArtifact,
            Map<String, Artifact> managedVersions,
            ArtifactResolutionRequest repositoryRequest,
            ArtifactMetadataSource source,
            ArtifactFilter filter,
            List<ResolutionListener> listeners,
            List<ConflictResolver> conflictResolvers) {
        ArtifactResolutionResult result = new ArtifactResolutionResult();
        result.setOriginatingArtifact(originatingArtifact);

        if (conflictResolvers == null) {
            // fall back to the "nearest wins" resolver
            conflictResolvers = Collections.singletonList(defaultConflictResolver);
        }

        Map<Object, List<ResolutionNode>> resolvedArtifacts = new LinkedHashMap<>();

        ResolutionNode root = new ResolutionNode(originatingArtifact, repositoryRequest.getRemoteRepositories());

        try {
            root.addDependencies(artifacts, repositoryRequest.getRemoteRepositories(), filter);
        } catch (CyclicDependencyException e) {
            result.addCircularDependencyException(e);

            return result;
        } catch (OverConstrainedVersionException e) {
            result.addVersionRangeViolation(e);

            return result;
        }

        ManagedVersionMap versionMap = getManagedVersionsMap(originatingArtifact, managedVersions);

        try {
            recurse(
                    result,
                    root,
                    resolvedArtifacts,
                    versionMap,
                    repositoryRequest,
                    source,
                    filter,
                    listeners,
                    conflictResolvers);
        } catch (CyclicDependencyException e) {
            logger.debug("While recursing: " + e.getMessage(), e);
            result.addCircularDependencyException(e);
        } catch (OverConstrainedVersionException e) {
            logger.debug("While recursing: " + e.getMessage(), e);
            result.addVersionRangeViolation(e);
        } catch (ArtifactResolutionException e) {
            logger.debug("While recursing: " + e.getMessage(), e);
            result.addErrorArtifactException(e);
        }

        Set<ResolutionNode> set = new LinkedHashSet<>();

        for (List<ResolutionNode> nodes : resolvedArtifacts.values()) {
            for (ResolutionNode node : nodes) {
                if (!node.equals(root) && node.isActive()) {
                    Artifact artifact = node.getArtifact();

                    try {
                        if (node.filterTrail(filter)) {
                            // If it was optional and not a direct dependency,
                            // we don't add it or its children, just allow the update of the version and artifactScope
                            if (node.isChildOfRootNode() || !artifact.isOptional()) {
                                artifact.setDependencyTrail(node.getDependencyTrail());

                                set.add(node);

                                // This is required right now.
                                result.addArtifact(artifact);
                            }
                        }
                    } catch (OverConstrainedVersionException e) {
                        result.addVersionRangeViolation(e);
                    }
                }
            }
        }

        result.setArtifactResolutionNodes(set);

        return result;
    }

    /**
     * Get the map of managed versions, removing the originating artifact if it is also in managed versions
     *
     * @param originatingArtifact artifact we are processing
     * @param managedVersions original managed versions
     * @return a {@link ManagedVersionMap} safe to mutate without affecting the caller's map
     */
    private ManagedVersionMap getManagedVersionsMap(
            Artifact originatingArtifact, Map<String, Artifact> managedVersions) {
        ManagedVersionMap versionMap;
        if (managedVersions instanceof ManagedVersionMap managedVersionMap) {
            versionMap = managedVersionMap;
        } else {
            versionMap = new ManagedVersionMap(managedVersions);
        }

        // remove the originating artifact if it is also in managed versions to avoid being modified during resolution
        Artifact managedOriginatingArtifact = versionMap.get(originatingArtifact.getDependencyConflictId());

        if (managedOriginatingArtifact != null) {
            // TODO we probably want to warn the user that he is building an artifact with
            // different values than in dependencyManagement
            if (managedVersions instanceof ManagedVersionMap) {
                /* avoid modifying the managedVersions parameter creating a new map */
                versionMap = new ManagedVersionMap(managedVersions);
            }
            versionMap.remove(originatingArtifact.getDependencyConflictId());
        }

        return versionMap;
    }

    /**
     * Recursively processes one resolution node: applies dependency management,
     * mediates version ranges against previously seen nodes with the same key, runs
     * conflict resolution, then descends into the node's children (retrieving their
     * metadata from {@code source}).
     *
     * @throws ArtifactResolutionException if dependency metadata cannot be retrieved
     */
    @SuppressWarnings({"checkstyle:parameternumber", "checkstyle:methodlength"})
    private void recurse(
            ArtifactResolutionResult result,
            ResolutionNode node,
            Map<Object, List<ResolutionNode>> resolvedArtifacts,
            ManagedVersionMap managedVersions,
            ArtifactResolutionRequest request,
            ArtifactMetadataSource source,
            ArtifactFilter filter,
            List<ResolutionListener> listeners,
            List<ConflictResolver> conflictResolvers)
            throws ArtifactResolutionException {
        fireEvent(ResolutionListener.TEST_ARTIFACT, listeners, node);

        Object key = node.getKey();

        // TODO Does this check need to happen here? Had to add the same call
        // below when we iterate on child nodes -- will that suffice?
        if (managedVersions.containsKey(key)) {
            manageArtifact(node, managedVersions, listeners);
        }

        List<ResolutionNode> previousNodes = resolvedArtifacts.get(key);

        if (previousNodes != null) {
            for (ResolutionNode previous : previousNodes) {
                try {
                    if (previous.isActive()) {
                        // Version mediation
                        VersionRange previousRange = previous.getArtifact().getVersionRange();
                        VersionRange currentRange = node.getArtifact().getVersionRange();

                        if ((previousRange != null) && (currentRange != null)) {
                            // TODO shouldn't need to double up on this work, only done for simplicity of handling
                            // recommended
                            // version but the restriction is identical
                            VersionRange newRange = previousRange.restrict(currentRange);
                            // TODO ick. this forces the OCE that should have come from the previous call. It is still
                            // correct
                            if (newRange.isSelectedVersionKnown(previous.getArtifact())) {
                                fireEvent(
                                        ResolutionListener.RESTRICT_RANGE,
                                        listeners,
                                        node,
                                        previous.getArtifact(),
                                        newRange);
                            }
                            previous.getArtifact().setVersionRange(newRange);
                            node.getArtifact().setVersionRange(currentRange.restrict(previousRange));

                            // Select an appropriate available version from the (now restricted) range
                            // Note this version was selected before to get the appropriate POM
                            // But it was reset by the call to setVersionRange on restricting the version
                            ResolutionNode[] resetNodes = {previous, node};
                            for (int j = 0; j < 2; j++) {
                                Artifact resetArtifact = resetNodes[j].getArtifact();

                                // MNG-2123: if the previous node was not a range, then it wouldn't have any available
                                // versions. We just clobbered the selected version above. (why? I have no idea.)
                                // So since we are here and this is ranges we must go figure out the version (for a
                                // third time...)
                                if (resetArtifact.getVersion() == null && resetArtifact.getVersionRange() != null) {

                                    // go find the version. This is a total hack. See previous comment.
                                    List<ArtifactVersion> versions = resetArtifact.getAvailableVersions();
                                    if (versions == null) {
                                        try {
                                            MetadataResolutionRequest metadataRequest =
                                                    new DefaultMetadataResolutionRequest(request);

                                            metadataRequest.setArtifact(resetArtifact);
                                            versions = source.retrieveAvailableVersions(metadataRequest);
                                            resetArtifact.setAvailableVersions(versions);
                                        } catch (ArtifactMetadataRetrievalException e) {
                                            resetArtifact.setDependencyTrail(node.getDependencyTrail());
                                            throw new ArtifactResolutionException(
                                                    "Unable to get dependency information: " + e.getMessage(),
                                                    resetArtifact,
                                                    request.getRemoteRepositories(),
                                                    e);
                                        }
                                    }
                                    // end hack

                                    // MNG-2861: match version can return null
                                    ArtifactVersion selectedVersion = resetArtifact
                                            .getVersionRange()
                                            .matchVersion(resetArtifact.getAvailableVersions());

                                    if (selectedVersion != null) {
                                        resetArtifact.selectVersion(selectedVersion.toString());
                                    } else {
                                        throw new OverConstrainedVersionException(
                                                "Unable to find a version in " + resetArtifact.getAvailableVersions()
                                                        + " to match the range " + resetArtifact.getVersionRange(),
                                                resetArtifact);
                                    }

                                    fireEvent(ResolutionListener.SELECT_VERSION_FROM_RANGE, listeners, resetNodes[j]);
                                }
                            }
                        }

                        // Conflict Resolution
                        ResolutionNode resolved = null;
                        for (Iterator<ConflictResolver> j = conflictResolvers.iterator();
                                resolved == null && j.hasNext(); ) {
                            ConflictResolver conflictResolver = j.next();

                            resolved = conflictResolver.resolveConflict(previous, node);
                        }

                        if (resolved == null) {
                            // TODO add better exception that can detail the two conflicting artifacts
                            ArtifactResolutionException are = new ArtifactResolutionException(
                                    "Cannot resolve artifact version conflict between "
                                            + previous.getArtifact().getVersion() + " and "
                                            + node.getArtifact().getVersion(),
                                    previous.getArtifact());
                            result.addVersionRangeViolation(are);
                        } else if ((resolved != previous) && (resolved != node)) {
                            // NOTE: this branch is guarded with else-if so that a null 'resolved'
                            // (no resolver produced a winner) is reported above as a version-range
                            // violation instead of triggering an NPE on resolved.getArtifact() here.
                            // TODO add better exception
                            result.addVersionRangeViolation(new ArtifactResolutionException(
                                    "Conflict resolver returned unknown resolution node: ", resolved.getArtifact()));
                        }

                        // TODO should this be part of mediation?
                        // previous one is more dominant
                        ResolutionNode nearest;
                        ResolutionNode farthest;

                        if (resolved == previous) {
                            nearest = previous;
                            farthest = node;
                        } else {
                            nearest = node;
                            farthest = previous;
                        }

                        if (checkScopeUpdate(farthest, nearest, listeners)) {
                            // if we need to update artifactScope of nearest to use farthest artifactScope, use the
                            // nearest version, but farthest artifactScope
                            nearest.disable();
                            farthest.getArtifact()
                                    .setVersion(nearest.getArtifact().getVersion());
                            fireEvent(ResolutionListener.OMIT_FOR_NEARER, listeners, nearest, farthest.getArtifact());
                        } else {
                            farthest.disable();
                            fireEvent(ResolutionListener.OMIT_FOR_NEARER, listeners, farthest, nearest.getArtifact());
                        }
                    }
                } catch (OverConstrainedVersionException e) {
                    result.addVersionRangeViolation(e);
                }
            }
        } else {
            previousNodes = new ArrayList<>();

            resolvedArtifacts.put(key, previousNodes);
        }
        previousNodes.add(node);

        if (node.isActive()) {
            fireEvent(ResolutionListener.INCLUDE_ARTIFACT, listeners, node);
        }

        // don't pull in the transitive deps of a system-scoped dependency.
        if (node.isActive() && !Artifact.SCOPE_SYSTEM.equals(node.getArtifact().getScope())) {
            fireEvent(ResolutionListener.PROCESS_CHILDREN, listeners, node);

            Artifact parentArtifact = node.getArtifact();

            for (Iterator<ResolutionNode> i = node.getChildrenIterator(); i.hasNext(); ) {
                ResolutionNode child = i.next();

                try {

                    // We leave in optional ones, but don't pick up its dependencies
                    if (!child.isResolved() && (!child.getArtifact().isOptional() || child.isChildOfRootNode())) {
                        Artifact artifact = child.getArtifact();
                        artifact.setDependencyTrail(node.getDependencyTrail());
                        List<ArtifactRepository> childRemoteRepositories = child.getRemoteRepositories();

                        MetadataResolutionRequest metadataRequest = new DefaultMetadataResolutionRequest(request);
                        metadataRequest.setArtifact(artifact);
                        metadataRequest.setRemoteRepositories(childRemoteRepositories);

                        try {
                            ResolutionGroup rGroup;

                            Object childKey;
                            do {
                                childKey = child.getKey();

                                if (managedVersions.containsKey(childKey)) {
                                    // If this child node is a managed dependency, ensure
                                    // we are using the dependency management version
                                    // of this child if applicable b/c we want to use the
                                    // managed version's POM, *not* any other version's POM.
                                    // We retrieve the POM below in the retrieval step.
                                    manageArtifact(child, managedVersions, listeners);

                                    // Also, we need to ensure that any exclusions it presents are
                                    // added to the artifact before we retrieve the metadata
                                    // for the artifact; otherwise we may end up with unwanted
                                    // dependencies.
                                    Artifact ma = managedVersions.get(childKey);
                                    ArtifactFilter managedExclusionFilter = ma.getDependencyFilter();
                                    if (null != managedExclusionFilter) {
                                        if (null != artifact.getDependencyFilter()) {
                                            AndArtifactFilter aaf = new AndArtifactFilter();
                                            aaf.add(artifact.getDependencyFilter());
                                            aaf.add(managedExclusionFilter);
                                            artifact.setDependencyFilter(aaf);
                                        } else {
                                            artifact.setDependencyFilter(managedExclusionFilter);
                                        }
                                    }
                                }

                                if (artifact.getVersion() == null) {
                                    // set the recommended version
                                    // TODO maybe its better to just pass the range through to retrieval and use a
                                    // transformation?
                                    ArtifactVersion version;
                                    if (!artifact.isSelectedVersionKnown()) {
                                        List<ArtifactVersion> versions = artifact.getAvailableVersions();
                                        if (versions == null) {
                                            versions = source.retrieveAvailableVersions(metadataRequest);
                                            artifact.setAvailableVersions(versions);
                                        }

                                        Collections.sort(versions);

                                        VersionRange versionRange = artifact.getVersionRange();

                                        version = versionRange.matchVersion(versions);

                                        if (version == null) {
                                            if (versions.isEmpty()) {
                                                throw new OverConstrainedVersionException(
                                                        "No versions are present in the repository for the artifact"
                                                                + " with a range " + versionRange,
                                                        artifact,
                                                        childRemoteRepositories);
                                            }

                                            throw new OverConstrainedVersionException(
                                                    "Couldn't find a version in " + versions + " to match range "
                                                            + versionRange,
                                                    artifact,
                                                    childRemoteRepositories);
                                        }
                                    } else {
                                        version = artifact.getSelectedVersion();
                                    }

                                    artifact.selectVersion(version.toString());
                                    fireEvent(ResolutionListener.SELECT_VERSION_FROM_RANGE, listeners, child);
                                }

                                rGroup = source.retrieve(metadataRequest);

                                if (rGroup == null) {
                                    break;
                                }

                            } while (!childKey.equals(child.getKey()));
                            // the loop above re-runs when retrieval relocated the artifact
                            // (the key changed), so management/metadata apply to the new GA(V)

                            if (parentArtifact != null
                                    && parentArtifact.getDependencyFilter() != null
                                    && !parentArtifact.getDependencyFilter().include(artifact)) {
                                // MNG-3769: the [probably relocated] artifact is excluded.
                                // We could process exclusions on relocated artifact details in the
                                // MavenMetadataSource.createArtifacts(..) step, BUT that would
                                // require resolving the POM from the repository very early on in
                                // the build.
                                continue;
                            }

                            // TODO might be better to have source.retrieve() throw a specific exception for this
                            // situation
                            // and catch here rather than have it return null
                            if (rGroup == null) {
                                // relocated dependency artifact is declared excluded, no need to add and recurse
                                // further
                                continue;
                            }

                            child.addDependencies(rGroup.getArtifacts(), rGroup.getResolutionRepositories(), filter);

                        } catch (CyclicDependencyException e) {
                            // would like to throw this, but we have crappy stuff in the repo

                            fireEvent(
                                    ResolutionListener.OMIT_FOR_CYCLE,
                                    listeners,
                                    new ResolutionNode(e.getArtifact(), childRemoteRepositories, child));
                        } catch (ArtifactMetadataRetrievalException e) {
                            artifact.setDependencyTrail(node.getDependencyTrail());

                            throw new ArtifactResolutionException(
                                    "Unable to get dependency information for " + artifact.getId() + ": "
                                            + e.getMessage(),
                                    artifact,
                                    childRemoteRepositories,
                                    e);
                        }

                        ArtifactResolutionRequest subRequest = new ArtifactResolutionRequest(metadataRequest);
                        subRequest.setServers(request.getServers());
                        subRequest.setMirrors(request.getMirrors());
                        subRequest.setProxies(request.getProxies());
                        recurse(
                                result,
                                child,
                                resolvedArtifacts,
                                managedVersions,
                                subRequest,
                                source,
                                filter,
                                listeners,
                                conflictResolvers);
                    }
                } catch (OverConstrainedVersionException e) {
                    result.addVersionRangeViolation(e);
                } catch (ArtifactResolutionException e) {
                    result.addMetadataResolutionException(e);
                }
            }

            fireEvent(ResolutionListener.FINISH_PROCESSING_CHILDREN, listeners, node);
        }
    }

    /**
     * Applies dependency management (version, artifactScope, system path) from
     * {@code managedVersions} to the node's artifact, firing the corresponding
     * listener events.
     */
    private void manageArtifact(
            ResolutionNode node, ManagedVersionMap managedVersions, List<ResolutionListener> listeners) {
        Artifact artifact = managedVersions.get(node.getKey());

        // Before we update the version of the artifact, we need to know
        // whether we are working on a transitive dependency or not. This
        // allows depMgmt to always override transitive dependencies, while
        // explicit child override depMgmt (viz. depMgmt should only
        // provide defaults to children, but should override transitives).
        // We can do this by calling isChildOfRootNode on the current node.
        if ((artifact.getVersion() != null)
                && (!node.isChildOfRootNode() || node.getArtifact().getVersion() == null)) {
            fireEvent(ResolutionListener.MANAGE_ARTIFACT_VERSION, listeners, node, artifact);
            node.getArtifact().setVersion(artifact.getVersion());
        }

        if ((artifact.getScope() != null)
                && (!node.isChildOfRootNode() || node.getArtifact().getScope() == null)) {
            fireEvent(ResolutionListener.MANAGE_ARTIFACT_SCOPE, listeners, node, artifact);
            node.getArtifact().setScope(artifact.getScope());
        }

        if (Artifact.SCOPE_SYSTEM.equals(node.getArtifact().getScope())
                && (node.getArtifact().getFile() == null)
                && (artifact.getFile() != null)) {
            fireEvent(ResolutionListener.MANAGE_ARTIFACT_SYSTEM_PATH, listeners, node, artifact);
            node.getArtifact().setFile(artifact.getFile());
        }
    }

    /**
     * Check if the artifactScope needs to be updated. <a
     * href="http://docs.codehaus.org/x/IGU#DependencyMediationandConflictResolution-Scoperesolution">More info</a>.
     *
     * @param farthest farthest resolution node
     * @param nearest nearest resolution node
     * @param listeners listeners to notify of artifactScope updates (or the suppressed update)
     * @return {@code true} if the nearest node's artifactScope was widened to the farthest's
     */
    boolean checkScopeUpdate(ResolutionNode farthest, ResolutionNode nearest, List<ResolutionListener> listeners) {
        boolean updateScope = false;
        Artifact farthestArtifact = farthest.getArtifact();
        Artifact nearestArtifact = nearest.getArtifact();

        /* farthest is runtime and nearest has lower priority, change to runtime */
        if (Artifact.SCOPE_RUNTIME.equals(farthestArtifact.getScope())
                && (Artifact.SCOPE_TEST.equals(nearestArtifact.getScope())
                        || Artifact.SCOPE_PROVIDED.equals(nearestArtifact.getScope()))) {
            updateScope = true;
        }

        /* farthest is compile and nearest is not (has lower priority), change to compile */
        if (Artifact.SCOPE_COMPILE.equals(farthestArtifact.getScope())
                && !Artifact.SCOPE_COMPILE.equals(nearestArtifact.getScope())) {
            updateScope = true;
        }

        /* current POM rules all, if nearest is in current pom, do not update its artifactScope */
        if ((nearest.getDepth() < 2) && updateScope) {
            updateScope = false;

            fireEvent(ResolutionListener.UPDATE_SCOPE_CURRENT_POM, listeners, nearest, farthestArtifact);
        }

        if (updateScope) {
            fireEvent(ResolutionListener.UPDATE_SCOPE, listeners, nearest, farthestArtifact);

            // previously we cloned the artifact, but it is more efficient to just update the artifactScope
            // if problems are later discovered that the original object needs its original artifactScope value,
            // cloning may
            // again be appropriate
            nearestArtifact.setScope(farthestArtifact.getScope());
        }

        return updateScope;
    }

    /** Fires a listener event with no replacement artifact. */
    private void fireEvent(int event, List<ResolutionListener> listeners, ResolutionNode node) {
        fireEvent(event, listeners, node, null);
    }

    /** Fires a listener event with a replacement artifact but no restricted range. */
    private void fireEvent(int event, List<ResolutionListener> listeners, ResolutionNode node, Artifact replacement) {
        fireEvent(event, listeners, node, replacement, null);
    }

    /**
     * Dispatches one {@link ResolutionListener} event constant to the matching
     * callback on every listener.
     *
     * @throws IllegalStateException if {@code event} is not a known constant
     */
    private void fireEvent(
            int event,
            List<ResolutionListener> listeners,
            ResolutionNode node,
            Artifact replacement,
            VersionRange newRange) {
        for (ResolutionListener listener : listeners) {
            switch (event) {
                case ResolutionListener.TEST_ARTIFACT:
                    listener.testArtifact(node.getArtifact());
                    break;
                case ResolutionListener.PROCESS_CHILDREN:
                    listener.startProcessChildren(node.getArtifact());
                    break;
                case ResolutionListener.FINISH_PROCESSING_CHILDREN:
                    listener.endProcessChildren(node.getArtifact());
                    break;
                case ResolutionListener.INCLUDE_ARTIFACT:
                    listener.includeArtifact(node.getArtifact());
                    break;
                case ResolutionListener.OMIT_FOR_NEARER:
                    listener.omitForNearer(node.getArtifact(), replacement);
                    break;
                case ResolutionListener.OMIT_FOR_CYCLE:
                    listener.omitForCycle(node.getArtifact());
                    break;
                case ResolutionListener.UPDATE_SCOPE:
                    listener.updateScope(node.getArtifact(), replacement.getScope());
                    break;
                case ResolutionListener.UPDATE_SCOPE_CURRENT_POM:
                    listener.updateScopeCurrentPom(node.getArtifact(), replacement.getScope());
                    break;
                case ResolutionListener.MANAGE_ARTIFACT_VERSION:
                    // depMgmt-aware listeners get the richer callback; others the legacy one
                    if (listener instanceof ResolutionListenerForDepMgmt asImpl) {
                        asImpl.manageArtifactVersion(node.getArtifact(), replacement);
                    } else {
                        listener.manageArtifact(node.getArtifact(), replacement);
                    }
                    break;
                case ResolutionListener.MANAGE_ARTIFACT_SCOPE:
                    if (listener instanceof ResolutionListenerForDepMgmt asImpl) {
                        asImpl.manageArtifactScope(node.getArtifact(), replacement);
                    } else {
                        listener.manageArtifact(node.getArtifact(), replacement);
                    }
                    break;
                case ResolutionListener.MANAGE_ARTIFACT_SYSTEM_PATH:
                    if (listener instanceof ResolutionListenerForDepMgmt asImpl) {
                        asImpl.manageArtifactSystemPath(node.getArtifact(), replacement);
                    } else {
                        listener.manageArtifact(node.getArtifact(), replacement);
                    }
                    break;
                case ResolutionListener.SELECT_VERSION_FROM_RANGE:
                    listener.selectVersionFromRange(node.getArtifact());
                    break;
                case ResolutionListener.RESTRICT_RANGE:
                    if (node.getArtifact().getVersionRange().hasRestrictions()
                            || replacement.getVersionRange().hasRestrictions()) {
                        listener.restrictRange(node.getArtifact(), replacement, newRange);
                    }
                    break;
                default:
                    throw new IllegalStateException("Unknown event: " + event);
            }
        }
    }

    /** Convenience overload without explicit conflict resolvers (defaults to "nearest"). */
    @Override
    @SuppressWarnings("checkstyle:parameternumber")
    public ArtifactResolutionResult collect(
            Set<Artifact> artifacts,
            Artifact originatingArtifact,
            Map<String, Artifact> managedVersions,
            ArtifactRepository localRepository,
            List<ArtifactRepository> remoteRepositories,
            ArtifactMetadataSource source,
            ArtifactFilter filter,
            List<ResolutionListener> listeners) {
        return collect(
                artifacts,
                originatingArtifact,
                managedVersions,
                localRepository,
                remoteRepositories,
                source,
                filter,
                listeners,
                null);
    }

    /** Convenience overload without managed versions or explicit conflict resolvers. */
    public ArtifactResolutionResult collect(
            Set<Artifact> artifacts,
            Artifact originatingArtifact,
            ArtifactRepository localRepository,
            List<ArtifactRepository> remoteRepositories,
            ArtifactMetadataSource source,
            ArtifactFilter filter,
            List<ResolutionListener> listeners) {
        return collect(
                artifacts, originatingArtifact, null, localRepository, remoteRepositories, source, filter, listeners);
    }
}
|
google/j2objc | 37,213 | jre_emul/android/platform/libcore/ojluni/src/main/java/java/net/ServerSocket.java | /*
* Copyright (c) 1995, 2013, Oracle and/or its affiliates. All rights reserved.
* DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER.
*
* This code is free software; you can redistribute it and/or modify it
* under the terms of the GNU General Public License version 2 only, as
* published by the Free Software Foundation. Oracle designates this
* particular file as subject to the "Classpath" exception as provided
* by Oracle in the LICENSE file that accompanied this code.
*
* This code is distributed in the hope that it will be useful, but WITHOUT
* ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
* FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
* version 2 for more details (a copy is included in the LICENSE file that
* accompanied this code).
*
* You should have received a copy of the GNU General Public License version
* 2 along with this work; if not, write to the Free Software Foundation,
* Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA.
*
* Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA
* or visit www.oracle.com if you need additional information or have any
* questions.
*/
package java.net;
import java.io.FileDescriptor;
import java.io.IOException;
import java.nio.channels.ServerSocketChannel;
/* J2ObjC removed
import java.security.AccessController;
import java.security.PrivilegedExceptionAction;
*/
/**
* This class implements server sockets. A server socket waits for
* requests to come in over the network. It performs some operation
* based on that request, and then possibly returns a result to the requester.
* <p>
* The actual work of the server socket is performed by an instance
* of the {@code SocketImpl} class. An application can
* change the socket factory that creates the socket
* implementation to configure itself to create sockets
* appropriate to the local firewall.
*
* @author unascribed
* @see java.net.SocketImpl
* @see java.net.ServerSocket#setSocketFactory(java.net.SocketImplFactory)
* @see java.nio.channels.ServerSocketChannel
* @since JDK1.0
*/
public
class ServerSocket implements java.io.Closeable {
    /**
     * Various states of this socket.
     */
    private boolean created = false;  // true once the underlying SocketImpl has been created (see createImpl())
    private boolean bound = false;    // true once bound to a local address (see bind())
    private boolean closed = false;   // guarded by closeLock; see close()/isClosed()
    private Object closeLock = new Object(); // lock protecting the closed flag
    /**
     * The implementation of this Socket.
     */
    private SocketImpl impl;
    /**
     * Are we using an older SocketImpl?
     */
    private boolean oldImpl = false;  // set by checkOldImpl() when the impl lacks connect(SocketAddress, int)
    /**
     * Package-private constructor to create a ServerSocket associated with
     * the given SocketImpl.
     *
     * @param impl the SocketImpl that will perform all socket operations
     *             for this server socket
     */
    ServerSocket(SocketImpl impl) {
        this.impl = impl;
        // Link the impl back to this ServerSocket so it can update our state.
        impl.setServerSocket(this);
    }
/**
* Creates an unbound server socket.
*
* @exception IOException IO error when opening the socket.
* @revised 1.4
*/
    public ServerSocket() throws IOException {
        // Creates the SocketImpl but does not bind; callers must invoke
        // bind() explicitly before accept() will work.
        setImpl();
    }
/**
* Creates a server socket, bound to the specified port. A port number
* of {@code 0} means that the port number is automatically
* allocated, typically from an ephemeral port range. This port
* number can then be retrieved by calling {@link #getLocalPort getLocalPort}.
* <p>
* The maximum queue length for incoming connection indications (a
* request to connect) is set to {@code 50}. If a connection
* indication arrives when the queue is full, the connection is refused.
* <p>
* If the application has specified a server socket factory, that
* factory's {@code createSocketImpl} method is called to create
* the actual socket implementation. Otherwise a "plain" socket is created.
* <p>
* If there is a security manager,
* its {@code checkListen} method is called
* with the {@code port} argument
* as its argument to ensure the operation is allowed.
* This could result in a SecurityException.
*
*
* @param port the port number, or {@code 0} to use a port
* number that is automatically allocated.
*
* @exception IOException if an I/O error occurs when opening the socket.
* @exception SecurityException
* if a security manager exists and its {@code checkListen}
* method doesn't allow the operation.
* @exception IllegalArgumentException if the port parameter is outside
* the specified range of valid port values, which is between
* 0 and 65535, inclusive.
*
* @see java.net.SocketImpl
* @see java.net.SocketImplFactory#createSocketImpl()
* @see java.net.ServerSocket#setSocketFactory(java.net.SocketImplFactory)
* @see SecurityManager#checkListen
*/
    public ServerSocket(int port) throws IOException {
        // Delegates with the default backlog (50) and a wildcard bind address.
        this(port, 50, null);
    }
/**
* Creates a server socket and binds it to the specified local port
* number, with the specified backlog.
* A port number of {@code 0} means that the port number is
* automatically allocated, typically from an ephemeral port range.
* This port number can then be retrieved by calling
* {@link #getLocalPort getLocalPort}.
* <p>
* The maximum queue length for incoming connection indications (a
* request to connect) is set to the {@code backlog} parameter. If
* a connection indication arrives when the queue is full, the
* connection is refused.
* <p>
* If the application has specified a server socket factory, that
* factory's {@code createSocketImpl} method is called to create
* the actual socket implementation. Otherwise a "plain" socket is created.
* <p>
* If there is a security manager,
* its {@code checkListen} method is called
* with the {@code port} argument
* as its argument to ensure the operation is allowed.
* This could result in a SecurityException.
*
* The {@code backlog} argument is the requested maximum number of
* pending connections on the socket. Its exact semantics are implementation
* specific. In particular, an implementation may impose a maximum length
* or may choose to ignore the parameter altogther. The value provided
* should be greater than {@code 0}. If it is less than or equal to
* {@code 0}, then an implementation specific default will be used.
* <P>
*
* @param port the port number, or {@code 0} to use a port
* number that is automatically allocated.
* @param backlog requested maximum length of the queue of incoming
* connections.
*
* @exception IOException if an I/O error occurs when opening the socket.
* @exception SecurityException
* if a security manager exists and its {@code checkListen}
* method doesn't allow the operation.
* @exception IllegalArgumentException if the port parameter is outside
* the specified range of valid port values, which is between
* 0 and 65535, inclusive.
*
* @see java.net.SocketImpl
* @see java.net.SocketImplFactory#createSocketImpl()
* @see java.net.ServerSocket#setSocketFactory(java.net.SocketImplFactory)
* @see SecurityManager#checkListen
*/
    public ServerSocket(int port, int backlog) throws IOException {
        // Delegates with a wildcard (null) bind address.
        this(port, backlog, null);
    }
/**
* Create a server with the specified port, listen backlog, and
* local IP address to bind to. The <i>bindAddr</i> argument
* can be used on a multi-homed host for a ServerSocket that
* will only accept connect requests to one of its addresses.
* If <i>bindAddr</i> is null, it will default accepting
* connections on any/all local addresses.
* The port must be between 0 and 65535, inclusive.
* A port number of {@code 0} means that the port number is
* automatically allocated, typically from an ephemeral port range.
* This port number can then be retrieved by calling
* {@link #getLocalPort getLocalPort}.
*
* <P>If there is a security manager, this method
* calls its {@code checkListen} method
* with the {@code port} argument
* as its argument to ensure the operation is allowed.
* This could result in a SecurityException.
*
* The {@code backlog} argument is the requested maximum number of
* pending connections on the socket. Its exact semantics are implementation
* specific. In particular, an implementation may impose a maximum length
* or may choose to ignore the parameter altogther. The value provided
* should be greater than {@code 0}. If it is less than or equal to
* {@code 0}, then an implementation specific default will be used.
* <P>
* @param port the port number, or {@code 0} to use a port
* number that is automatically allocated.
* @param backlog requested maximum length of the queue of incoming
* connections.
* @param bindAddr the local InetAddress the server will bind to
*
* @throws SecurityException if a security manager exists and
* its {@code checkListen} method doesn't allow the operation.
*
* @throws IOException if an I/O error occurs when opening the socket.
* @exception IllegalArgumentException if the port parameter is outside
* the specified range of valid port values, which is between
* 0 and 65535, inclusive.
*
* @see SocketOptions
* @see SocketImpl
* @see SecurityManager#checkListen
* @since JDK1.1
*/
public ServerSocket(int port, int backlog, InetAddress bindAddr) throws IOException {
setImpl();
if (port < 0 || port > 0xFFFF)
throw new IllegalArgumentException(
"Port value out of range: " + port);
if (backlog < 1)
backlog = 50;
try {
bind(new InetSocketAddress(bindAddr, port), backlog);
} catch(SecurityException e) {
close();
throw e;
} catch(IOException e) {
close();
throw e;
}
}
// Android-changed: Made getImpl() public and @hide, for internal use.
/**
* Get the {@code SocketImpl} attached to this socket, creating
* it if necessary.
*
* @return the {@code SocketImpl} attached to that ServerSocket.
* @throws SocketException if creation fails.
* @since 1.4
* @hide
*/
public SocketImpl getImpl() throws SocketException {
if (!created)
createImpl();
return impl;
}
    // Probes whether the installed SocketImpl predates 1.4: older impls do
    // not declare connect(SocketAddress, int). When the probe fails, oldImpl
    // is set so bind()/isBound() can apply legacy semantics.
    private void checkOldImpl() {
        if (impl == null)
            return;
        // SocketImpl.connect() is a protected method, therefore we need to use
        // getDeclaredMethod, therefore we need permission to access the member
        try {
            /* J2ObjC removed.
            AccessController.doPrivileged(
                new PrivilegedExceptionAction<Void>() {
                    public Void run() throws NoSuchMethodException {
            */
            impl.getClass().getDeclaredMethod("connect",
                                              SocketAddress.class,
                                              int.class);
            /*
                        return null;
                    }
                });
        } catch (java.security.PrivilegedActionException e) {
        */
        } catch (NoSuchMethodException e) {
            // Method absent: this is a pre-1.4 ("old") SocketImpl.
            oldImpl = true;
        }
    }
private void setImpl() {
if (factory != null) {
impl = factory.createSocketImpl();
checkOldImpl();
} else {
// No need to do a checkOldImpl() here, we know it's an up to date
// SocketImpl!
impl = new SocksSocketImpl();
}
if (impl != null)
impl.setServerSocket(this);
}
/**
* Creates the socket implementation.
*
* @throws IOException if creation fails
* @since 1.4
*/
void createImpl() throws SocketException {
if (impl == null)
setImpl();
try {
impl.create(true);
created = true;
} catch (IOException e) {
throw new SocketException(e.getMessage());
}
}
/**
*
* Binds the {@code ServerSocket} to a specific address
* (IP address and port number).
* <p>
* If the address is {@code null}, then the system will pick up
* an ephemeral port and a valid local address to bind the socket.
* <p>
* @param endpoint The IP address and port number to bind to.
* @throws IOException if the bind operation fails, or if the socket
* is already bound.
* @throws SecurityException if a {@code SecurityManager} is present and
* its {@code checkListen} method doesn't allow the operation.
* @throws IllegalArgumentException if endpoint is a
* SocketAddress subclass not supported by this socket
* @since 1.4
*/
    public void bind(SocketAddress endpoint) throws IOException {
        // 50 is the historical default backlog used throughout this class.
        bind(endpoint, 50);
    }
/**
*
* Binds the {@code ServerSocket} to a specific address
* (IP address and port number).
* <p>
* If the address is {@code null}, then the system will pick up
* an ephemeral port and a valid local address to bind the socket.
* <P>
* The {@code backlog} argument is the requested maximum number of
* pending connections on the socket. Its exact semantics are implementation
* specific. In particular, an implementation may impose a maximum length
* or may choose to ignore the parameter altogther. The value provided
* should be greater than {@code 0}. If it is less than or equal to
* {@code 0}, then an implementation specific default will be used.
* @param endpoint The IP address and port number to bind to.
* @param backlog requested maximum length of the queue of
* incoming connections.
* @throws IOException if the bind operation fails, or if the socket
* is already bound.
* @throws SecurityException if a {@code SecurityManager} is present and
* its {@code checkListen} method doesn't allow the operation.
* @throws IllegalArgumentException if endpoint is a
* SocketAddress subclass not supported by this socket
* @since 1.4
*/
public void bind(SocketAddress endpoint, int backlog) throws IOException {
if (isClosed())
throw new SocketException("Socket is closed");
if (!oldImpl && isBound())
throw new SocketException("Already bound");
if (endpoint == null)
endpoint = new InetSocketAddress(0);
if (!(endpoint instanceof InetSocketAddress))
throw new IllegalArgumentException("Unsupported address type");
InetSocketAddress epoint = (InetSocketAddress) endpoint;
if (epoint.isUnresolved())
throw new SocketException("Unresolved address");
if (backlog < 1)
backlog = 50;
try {
// SecurityManager security = System.getSecurityManager();
// if (security != null)
// security.checkListen(epoint.getPort());
getImpl().bind(epoint.getAddress(), epoint.getPort());
getImpl().listen(backlog);
bound = true;
} catch(SecurityException e) {
bound = false;
throw e;
} catch(IOException e) {
bound = false;
throw e;
}
}
/**
* Returns the local address of this server socket.
* <p>
* If the socket was bound prior to being {@link #close closed},
* then this method will continue to return the local address
* after the socket is closed.
* <p>
* If there is a security manager set, its {@code checkConnect} method is
* called with the local address and {@code -1} as its arguments to see
* if the operation is allowed. If the operation is not allowed,
* the {@link InetAddress#getLoopbackAddress loopback} address is returned.
*
* @return the address to which this socket is bound,
* or the loopback address if denied by the security manager,
* or {@code null} if the socket is unbound.
*
* @see SecurityManager#checkConnect
*/
    public InetAddress getInetAddress() {
        if (!isBound())
            return null;
        try {
            InetAddress in = getImpl().getInetAddress();
            // Android: the SecurityManager checkConnect() call is commented
            // out here, so the SecurityException branch below is effectively
            // dormant in this build.
            // SecurityManager sm = System.getSecurityManager();
            // if (sm != null)
            //     sm.checkConnect(in.getHostAddress(), -1);
            return in;
        } catch (SecurityException e) {
            return InetAddress.getLoopbackAddress();
        } catch (SocketException e) {
            // nothing
            // If we're bound, the impl has been created
            // so we shouldn't get here
        }
        return null;
    }
/**
* Returns the port number on which this socket is listening.
* <p>
* If the socket was bound prior to being {@link #close closed},
* then this method will continue to return the port number
* after the socket is closed.
*
* @return the port number to which this socket is listening or
* -1 if the socket is not bound yet.
*/
public int getLocalPort() {
if (!isBound())
return -1;
try {
return getImpl().getLocalPort();
} catch (SocketException e) {
// nothing
// If we're bound, the impl has been created
// so we shouldn't get here
}
return -1;
}
/**
* Returns the address of the endpoint this socket is bound to.
* <p>
* If the socket was bound prior to being {@link #close closed},
* then this method will continue to return the address of the endpoint
* after the socket is closed.
* <p>
* If there is a security manager set, its {@code checkConnect} method is
* called with the local address and {@code -1} as its arguments to see
* if the operation is allowed. If the operation is not allowed,
* a {@code SocketAddress} representing the
* {@link InetAddress#getLoopbackAddress loopback} address and the local
* port to which the socket is bound is returned.
*
* @return a {@code SocketAddress} representing the local endpoint of
* this socket, or a {@code SocketAddress} representing the
* loopback address if denied by the security manager,
* or {@code null} if the socket is not bound yet.
*
* @see #getInetAddress()
* @see #getLocalPort()
* @see #bind(SocketAddress)
* @see SecurityManager#checkConnect
* @since 1.4
*/
public SocketAddress getLocalSocketAddress() {
if (!isBound())
return null;
return new InetSocketAddress(getInetAddress(), getLocalPort());
}
/**
* Listens for a connection to be made to this socket and accepts
* it. The method blocks until a connection is made.
*
* <p>A new Socket {@code s} is created and, if there
* is a security manager,
* the security manager's {@code checkAccept} method is called
* with {@code s.getInetAddress().getHostAddress()} and
* {@code s.getPort()}
* as its arguments to ensure the operation is allowed.
* This could result in a SecurityException.
*
* @exception IOException if an I/O error occurs when waiting for a
* connection.
* @exception SecurityException if a security manager exists and its
* {@code checkAccept} method doesn't allow the operation.
* @exception SocketTimeoutException if a timeout was previously set with setSoTimeout and
* the timeout has been reached.
* @exception java.nio.channels.IllegalBlockingModeException
* if this socket has an associated channel, the channel is in
* non-blocking mode, and there is no connection ready to be
* accepted
*
* @return the new Socket
* @see SecurityManager#checkAccept
* @revised 1.4
* @spec JSR-51
*/
public Socket accept() throws IOException {
if (isClosed())
throw new SocketException("Socket is closed");
if (!isBound())
throw new SocketException("Socket is not bound yet");
Socket s = new Socket((SocketImpl) null);
implAccept(s);
return s;
}
/**
* Subclasses of ServerSocket use this method to override accept()
* to return their own subclass of socket. So a FooServerSocket
* will typically hand this method an <i>empty</i> FooSocket. On
* return from implAccept the FooSocket will be connected to a client.
*
* @param s the Socket
* @throws java.nio.channels.IllegalBlockingModeException
* if this socket has an associated channel,
* and the channel is in non-blocking mode
* @throws IOException if an I/O error occurs when waiting
* for a connection.
* @since JDK1.1
* @revised 1.4
* @spec JSR-51
*/
    protected final void implAccept(Socket s) throws IOException {
        SocketImpl si = null;
        try {
            // Reuse the supplied Socket's impl if it already has one
            // (after a reset); otherwise create a fresh impl for it.
            if (s.impl == null)
              s.setImpl();
            else {
                s.impl.reset();
            }
            si = s.impl;
            // Detach the impl while accept() is in flight so the Socket is
            // never observable in a half-connected state; it is reattached
            // on every exit path below.
            s.impl = null;
            si.address = new InetAddress();
            si.fd = new FileDescriptor();
            getImpl().accept(si);
            // SecurityManager security = System.getSecurityManager();
            // if (security != null) {
            //     security.checkAccept(si.getInetAddress().getHostAddress(),
            //                          si.getPort());
            // }
        } catch (IOException e) {
            // On failure, return the (reset) impl to the Socket before
            // rethrowing so the caller's Socket stays internally consistent.
            if (si != null)
                si.reset();
            s.impl = si;
            throw e;
        } catch (SecurityException e) {
            if (si != null)
                si.reset();
            s.impl = si;
            throw e;
        }
        s.impl = si;
        s.postAccept();
    }
/**
* Closes this socket.
*
* Any thread currently blocked in {@link #accept()} will throw
* a {@link SocketException}.
*
* <p> If this socket has an associated channel then the channel is closed
* as well.
*
* @exception IOException if an I/O error occurs when closing the socket.
* @revised 1.4
* @spec JSR-51
*/
public void close() throws IOException {
synchronized(closeLock) {
if (isClosed())
return;
if (created)
impl.close();
closed = true;
}
}
/**
* Returns the unique {@link java.nio.channels.ServerSocketChannel} object
* associated with this socket, if any.
*
* <p> A server socket will have a channel if, and only if, the channel
* itself was created via the {@link
* java.nio.channels.ServerSocketChannel#open ServerSocketChannel.open}
* method.
*
* @return the server-socket channel associated with this socket,
* or {@code null} if this socket was not created
* for a channel
*
* @since 1.4
* @spec JSR-51
*/
    public ServerSocketChannel getChannel() {
        // Per the contract above, only sockets created via
        // ServerSocketChannel.open() have a channel; a plain ServerSocket
        // therefore never does.
        return null;
    }
/**
* Returns the binding state of the ServerSocket.
*
* @return true if the ServerSocket successfully bound to an address
* @since 1.4
*/
    public boolean isBound() {
        // Before 1.3 ServerSockets were always bound during creation, so a
        // legacy (oldImpl) SocketImpl reports bound unconditionally.
        return bound || oldImpl;
    }
/**
* Returns the closed state of the ServerSocket.
*
* @return true if the socket has been closed
* @since 1.4
*/
    public boolean isClosed() {
        // closeLock keeps this read consistent with close(), which flips
        // the flag under the same lock.
        synchronized(closeLock) {
            return closed;
        }
    }
/**
* Enable/disable {@link SocketOptions#SO_TIMEOUT SO_TIMEOUT} with the
* specified timeout, in milliseconds. With this option set to a non-zero
* timeout, a call to accept() for this ServerSocket
* will block for only this amount of time. If the timeout expires,
* a <B>java.net.SocketTimeoutException</B> is raised, though the
* ServerSocket is still valid. The option <B>must</B> be enabled
* prior to entering the blocking operation to have effect. The
* timeout must be {@code > 0}.
* A timeout of zero is interpreted as an infinite timeout.
* @param timeout the specified timeout, in milliseconds
* @exception SocketException if there is an error in
* the underlying protocol, such as a TCP error.
* @since JDK1.1
* @see #getSoTimeout()
*/
public synchronized void setSoTimeout(int timeout) throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
getImpl().setOption(SocketOptions.SO_TIMEOUT, new Integer(timeout));
}
/**
* Retrieve setting for {@link SocketOptions#SO_TIMEOUT SO_TIMEOUT}.
* 0 returns implies that the option is disabled (i.e., timeout of infinity).
* @return the {@link SocketOptions#SO_TIMEOUT SO_TIMEOUT} value
* @exception IOException if an I/O error occurs
* @since JDK1.1
* @see #setSoTimeout(int)
*/
public synchronized int getSoTimeout() throws IOException {
if (isClosed())
throw new SocketException("Socket is closed");
Object o = getImpl().getOption(SocketOptions.SO_TIMEOUT);
/* extra type safety */
if (o instanceof Integer) {
return ((Integer) o).intValue();
} else {
return 0;
}
}
/**
* Enable/disable the {@link SocketOptions#SO_REUSEADDR SO_REUSEADDR}
* socket option.
* <p>
* When a TCP connection is closed the connection may remain
* in a timeout state for a period of time after the connection
* is closed (typically known as the {@code TIME_WAIT} state
* or {@code 2MSL} wait state).
* For applications using a well known socket address or port
* it may not be possible to bind a socket to the required
* {@code SocketAddress} if there is a connection in the
* timeout state involving the socket address or port.
* <p>
* Enabling {@link SocketOptions#SO_REUSEADDR SO_REUSEADDR} prior to
* binding the socket using {@link #bind(SocketAddress)} allows the socket
* to be bound even though a previous connection is in a timeout state.
* <p>
* When a {@code ServerSocket} is created the initial setting
* of {@link SocketOptions#SO_REUSEADDR SO_REUSEADDR} is not defined.
* Applications can use {@link #getReuseAddress()} to determine the initial
* setting of {@link SocketOptions#SO_REUSEADDR SO_REUSEADDR}.
* <p>
* The behaviour when {@link SocketOptions#SO_REUSEADDR SO_REUSEADDR} is
* enabled or disabled after a socket is bound (See {@link #isBound()})
* is not defined.
*
* @param on whether to enable or disable the socket option
* @exception SocketException if an error occurs enabling or
* disabling the {@link SocketOptions#SO_REUSEADDR SO_REUSEADDR}
* socket option, or the socket is closed.
* @since 1.4
* @see #getReuseAddress()
* @see #bind(SocketAddress)
* @see #isBound()
* @see #isClosed()
*/
    public void setReuseAddress(boolean on) throws SocketException {
        if (isClosed())
            throw new SocketException("Socket is closed");
        // Option values are boxed; Boolean.valueOf reuses the cached boxes.
        getImpl().setOption(SocketOptions.SO_REUSEADDR, Boolean.valueOf(on));
    }
/**
* Tests if {@link SocketOptions#SO_REUSEADDR SO_REUSEADDR} is enabled.
*
* @return a {@code boolean} indicating whether or not
* {@link SocketOptions#SO_REUSEADDR SO_REUSEADDR} is enabled.
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
* @since 1.4
* @see #setReuseAddress(boolean)
*/
public boolean getReuseAddress() throws SocketException {
if (isClosed())
throw new SocketException("Socket is closed");
return ((Boolean) (getImpl().getOption(SocketOptions.SO_REUSEADDR))).booleanValue();
}
/**
* Returns the implementation address and implementation port of
* this socket as a {@code String}.
* <p>
* If there is a security manager set, its {@code checkConnect} method is
* called with the local address and {@code -1} as its arguments to see
* if the operation is allowed. If the operation is not allowed,
* an {@code InetAddress} representing the
* {@link InetAddress#getLoopbackAddress loopback} address is returned as
* the implementation address.
*
* @return a string representation of this socket.
*/
    public String toString() {
        if (!isBound())
            return "ServerSocket[unbound]";
        InetAddress in;
        // Android: the security-manager redaction to the loopback address is
        // commented out, so the real bound address is always reported.
        // if (System.getSecurityManager() != null)
        //     in = InetAddress.getLoopbackAddress();
        // else
        in = impl.getInetAddress();
        return "ServerSocket[addr=" + in +
                ",localport=" + impl.getLocalPort() + "]";
    }
    // Package-private state setter: records that binding has completed.
    void setBound() {
        bound = true;
    }
    // Package-private state setter: records that the impl has been created.
    void setCreated() {
        created = true;
    }
    /**
     * The factory for all server sockets. Set at most once via
     * {@link #setSocketFactory}; while {@code null}, setImpl() falls back
     * to the built-in SocksSocketImpl.
     */
    private static SocketImplFactory factory = null;
/**
* Sets the server socket implementation factory for the
* application. The factory can be specified only once.
* <p>
* When an application creates a new server socket, the socket
* implementation factory's {@code createSocketImpl} method is
* called to create the actual socket implementation.
* <p>
* Passing {@code null} to the method is a no-op unless the factory
* was already set.
* <p>
* If there is a security manager, this method first calls
* the security manager's {@code checkSetFactory} method
* to ensure the operation is allowed.
* This could result in a SecurityException.
*
* @param fac the desired factory.
* @exception IOException if an I/O error occurs when setting the
* socket factory.
* @exception SocketException if the factory has already been defined.
* @exception SecurityException if a security manager exists and its
* {@code checkSetFactory} method doesn't allow the operation.
* @see java.net.SocketImplFactory#createSocketImpl()
* @see SecurityManager#checkSetFactory
*/
    public static synchronized void setSocketFactory(SocketImplFactory fac) throws IOException {
        // Write-once: a second call fails, even with null.
        if (factory != null) {
            throw new SocketException("factory already defined");
        }
        // Android: the checkSetFactory security check is elided here.
        // SecurityManager security = System.getSecurityManager();
        // if (security != null) {
        //     security.checkSetFactory();
        // }
        factory = fac;
    }
/**
* Sets a default proposed value for the
* {@link SocketOptions#SO_RCVBUF SO_RCVBUF} option for sockets
* accepted from this {@code ServerSocket}. The value actually set
* in the accepted socket must be determined by calling
* {@link Socket#getReceiveBufferSize()} after the socket
* is returned by {@link #accept()}.
* <p>
* The value of {@link SocketOptions#SO_RCVBUF SO_RCVBUF} is used both to
* set the size of the internal socket receive buffer, and to set the size
* of the TCP receive window that is advertized to the remote peer.
* <p>
* It is possible to change the value subsequently, by calling
* {@link Socket#setReceiveBufferSize(int)}. However, if the application
* wishes to allow a receive window larger than 64K bytes, as defined by RFC1323
* then the proposed value must be set in the ServerSocket <B>before</B>
* it is bound to a local address. This implies, that the ServerSocket must be
* created with the no-argument constructor, then setReceiveBufferSize() must
* be called and lastly the ServerSocket is bound to an address by calling bind().
* <p>
* Failure to do this will not cause an error, and the buffer size may be set to the
* requested value but the TCP receive window in sockets accepted from
* this ServerSocket will be no larger than 64K bytes.
*
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
*
* @param size the size to which to set the receive buffer
* size. This value must be greater than 0.
*
* @exception IllegalArgumentException if the
* value is 0 or is negative.
*
* @since 1.4
* @see #getReceiveBufferSize
*/
public synchronized void setReceiveBufferSize (int size) throws SocketException {
if (!(size > 0)) {
throw new IllegalArgumentException("negative receive size");
}
if (isClosed())
throw new SocketException("Socket is closed");
getImpl().setOption(SocketOptions.SO_RCVBUF, new Integer(size));
}
/**
* Gets the value of the {@link SocketOptions#SO_RCVBUF SO_RCVBUF} option
* for this {@code ServerSocket}, that is the proposed buffer size that
* will be used for Sockets accepted from this {@code ServerSocket}.
*
* <p>Note, the value actually set in the accepted socket is determined by
* calling {@link Socket#getReceiveBufferSize()}.
* @return the value of the {@link SocketOptions#SO_RCVBUF SO_RCVBUF}
* option for this {@code Socket}.
* @exception SocketException if there is an error
* in the underlying protocol, such as a TCP error.
* @see #setReceiveBufferSize(int)
* @since 1.4
*/
public synchronized int getReceiveBufferSize()
throws SocketException{
if (isClosed())
throw new SocketException("Socket is closed");
int result = 0;
Object o = getImpl().getOption(SocketOptions.SO_RCVBUF);
if (o instanceof Integer) {
result = ((Integer)o).intValue();
}
return result;
}
/**
* Sets performance preferences for this ServerSocket.
*
* <p> Sockets use the TCP/IP protocol by default. Some implementations
* may offer alternative protocols which have different performance
* characteristics than TCP/IP. This method allows the application to
* express its own preferences as to how these tradeoffs should be made
* when the implementation chooses from the available protocols.
*
* <p> Performance preferences are described by three integers
* whose values indicate the relative importance of short connection time,
* low latency, and high bandwidth. The absolute values of the integers
* are irrelevant; in order to choose a protocol the values are simply
* compared, with larger values indicating stronger preferences. If the
* application prefers short connection time over both low latency and high
* bandwidth, for example, then it could invoke this method with the values
* {@code (1, 0, 0)}. If the application prefers high bandwidth above low
* latency, and low latency above short connection time, then it could
* invoke this method with the values {@code (0, 1, 2)}.
*
* <p> Invoking this method after this socket has been bound
* will have no effect. This implies that in order to use this capability
* requires the socket to be created with the no-argument constructor.
*
* @param connectionTime
* An {@code int} expressing the relative importance of a short
* connection time
*
* @param latency
* An {@code int} expressing the relative importance of low
* latency
*
* @param bandwidth
* An {@code int} expressing the relative importance of high
* bandwidth
*
* @since 1.5
*/
    public void setPerformancePreferences(int connectionTime,
                                          int latency,
                                          int bandwidth)
    {
        /* Not implemented yet */
        // Intentionally a no-op in this implementation: preferences are
        // accepted and silently ignored.
    }
// Android-added: getFileDescriptor$(), for testing / internal use.
/**
* @hide internal use only
*/
    public FileDescriptor getFileDescriptor$() {
        // Exposes the impl's raw FileDescriptor for testing / internal use
        // (Android addition; see comment above).
        return impl.getFileDescriptor();
    }
}
|
googleapis/google-cloud-java | 36,900 | java-discoveryengine/proto-google-cloud-discoveryengine-v1/src/main/java/com/google/cloud/discoveryengine/v1/ImportUserEventsMetadata.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/discoveryengine/v1/import_config.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.discoveryengine.v1;
/**
*
*
* <pre>
* Metadata related to the progress of the Import operation. This is
* returned by the google.longrunning.Operation.metadata field.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.ImportUserEventsMetadata}
*/
public final class ImportUserEventsMetadata extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1.ImportUserEventsMetadata)
ImportUserEventsMetadataOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ImportUserEventsMetadata.newBuilder() to construct.
  private ImportUserEventsMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // No-arg form with all fields unset; handed out by newInstance() below.
  private ImportUserEventsMetadata() {}
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    // protoc-generated allocation hook — regenerate rather than hand-edit.
    return new ImportUserEventsMetadata();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    // Descriptor and accessor table both live on the generated
    // ImportConfigProto holder class for this .proto file.
    return com.google.cloud.discoveryengine.v1.ImportConfigProto
        .internal_static_google_cloud_discoveryengine_v1_ImportUserEventsMetadata_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.discoveryengine.v1.ImportConfigProto
        .internal_static_google_cloud_discoveryengine_v1_ImportUserEventsMetadata_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata.class,
            com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata.Builder.class);
  }
private int bitField0_;
public static final int CREATE_TIME_FIELD_NUMBER = 1;
private com.google.protobuf.Timestamp createTime_;
/**
*
*
* <pre>
* Operation create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 1;</code>
*
* @return Whether the createTime field is set.
*/
@java.lang.Override
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Operation create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 1;</code>
*
* @return The createTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getCreateTime() {
return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
/**
*
*
* <pre>
* Operation create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 1;</code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
return createTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : createTime_;
}
public static final int UPDATE_TIME_FIELD_NUMBER = 2;
private com.google.protobuf.Timestamp updateTime_;
/**
*
*
* <pre>
* Operation last update time. If the operation is done, this is also the
* finish time.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2;</code>
*
* @return Whether the updateTime field is set.
*/
@java.lang.Override
public boolean hasUpdateTime() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Operation last update time. If the operation is done, this is also the
* finish time.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2;</code>
*
* @return The updateTime.
*/
@java.lang.Override
public com.google.protobuf.Timestamp getUpdateTime() {
return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_;
}
/**
*
*
* <pre>
* Operation last update time. If the operation is done, this is also the
* finish time.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2;</code>
*/
@java.lang.Override
public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
return updateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : updateTime_;
}
public static final int SUCCESS_COUNT_FIELD_NUMBER = 3;
private long successCount_ = 0L;
/**
*
*
* <pre>
* Count of entries that were processed successfully.
* </pre>
*
* <code>int64 success_count = 3;</code>
*
* @return The successCount.
*/
@java.lang.Override
public long getSuccessCount() {
return successCount_;
}
public static final int FAILURE_COUNT_FIELD_NUMBER = 4;
private long failureCount_ = 0L;
/**
*
*
* <pre>
* Count of entries that encountered errors while processing.
* </pre>
*
* <code>int64 failure_count = 4;</code>
*
* @return The failureCount.
*/
@java.lang.Override
public long getFailureCount() {
return failureCount_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(1, getCreateTime());
}
if (((bitField0_ & 0x00000002) != 0)) {
output.writeMessage(2, getUpdateTime());
}
if (successCount_ != 0L) {
output.writeInt64(3, successCount_);
}
if (failureCount_ != 0L) {
output.writeInt64(4, failureCount_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getCreateTime());
}
if (((bitField0_ & 0x00000002) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateTime());
}
if (successCount_ != 0L) {
size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, successCount_);
}
if (failureCount_ != 0L) {
size += com.google.protobuf.CodedOutputStream.computeInt64Size(4, failureCount_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata)) {
return super.equals(obj);
}
com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata other =
(com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata) obj;
if (hasCreateTime() != other.hasCreateTime()) return false;
if (hasCreateTime()) {
if (!getCreateTime().equals(other.getCreateTime())) return false;
}
if (hasUpdateTime() != other.hasUpdateTime()) return false;
if (hasUpdateTime()) {
if (!getUpdateTime().equals(other.getUpdateTime())) return false;
}
if (getSuccessCount() != other.getSuccessCount()) return false;
if (getFailureCount() != other.getFailureCount()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (hasCreateTime()) {
hash = (37 * hash) + CREATE_TIME_FIELD_NUMBER;
hash = (53 * hash) + getCreateTime().hashCode();
}
if (hasUpdateTime()) {
hash = (37 * hash) + UPDATE_TIME_FIELD_NUMBER;
hash = (53 * hash) + getUpdateTime().hashCode();
}
hash = (37 * hash) + SUCCESS_COUNT_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getSuccessCount());
hash = (37 * hash) + FAILURE_COUNT_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getFailureCount());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Metadata related to the progress of the Import operation. This is
* returned by the google.longrunning.Operation.metadata field.
* </pre>
*
* Protobuf type {@code google.cloud.discoveryengine.v1.ImportUserEventsMetadata}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1.ImportUserEventsMetadata)
com.google.cloud.discoveryengine.v1.ImportUserEventsMetadataOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.discoveryengine.v1.ImportConfigProto
.internal_static_google_cloud_discoveryengine_v1_ImportUserEventsMetadata_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.discoveryengine.v1.ImportConfigProto
.internal_static_google_cloud_discoveryengine_v1_ImportUserEventsMetadata_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata.class,
com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata.Builder.class);
}
// Construct using com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getCreateTimeFieldBuilder();
getUpdateTimeFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
createTime_ = null;
if (createTimeBuilder_ != null) {
createTimeBuilder_.dispose();
createTimeBuilder_ = null;
}
updateTime_ = null;
if (updateTimeBuilder_ != null) {
updateTimeBuilder_.dispose();
updateTimeBuilder_ = null;
}
successCount_ = 0L;
failureCount_ = 0L;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.discoveryengine.v1.ImportConfigProto
.internal_static_google_cloud_discoveryengine_v1_ImportUserEventsMetadata_descriptor;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata
getDefaultInstanceForType() {
return com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata build() {
com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata buildPartial() {
com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata result =
new com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata result) {
int from_bitField0_ = bitField0_;
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.createTime_ = createTimeBuilder_ == null ? createTime_ : createTimeBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000002) != 0)) {
result.updateTime_ = updateTimeBuilder_ == null ? updateTime_ : updateTimeBuilder_.build();
to_bitField0_ |= 0x00000002;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.successCount_ = successCount_;
}
if (((from_bitField0_ & 0x00000008) != 0)) {
result.failureCount_ = failureCount_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata) {
return mergeFrom((com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata other) {
if (other
== com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata.getDefaultInstance())
return this;
if (other.hasCreateTime()) {
mergeCreateTime(other.getCreateTime());
}
if (other.hasUpdateTime()) {
mergeUpdateTime(other.getUpdateTime());
}
if (other.getSuccessCount() != 0L) {
setSuccessCount(other.getSuccessCount());
}
if (other.getFailureCount() != 0L) {
setFailureCount(other.getFailureCount());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
input.readMessage(getCreateTimeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getUpdateTimeFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 24:
{
successCount_ = input.readInt64();
bitField0_ |= 0x00000004;
break;
} // case 24
case 32:
{
failureCount_ = input.readInt64();
bitField0_ |= 0x00000008;
break;
} // case 32
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private com.google.protobuf.Timestamp createTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
createTimeBuilder_;
/**
*
*
* <pre>
* Operation create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 1;</code>
*
* @return Whether the createTime field is set.
*/
public boolean hasCreateTime() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Operation create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 1;</code>
*
* @return The createTime.
*/
public com.google.protobuf.Timestamp getCreateTime() {
if (createTimeBuilder_ == null) {
return createTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: createTime_;
} else {
return createTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Operation create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 1;</code>
*/
public Builder setCreateTime(com.google.protobuf.Timestamp value) {
if (createTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
createTime_ = value;
} else {
createTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Operation create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 1;</code>
*/
public Builder setCreateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (createTimeBuilder_ == null) {
createTime_ = builderForValue.build();
} else {
createTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* Operation create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 1;</code>
*/
public Builder mergeCreateTime(com.google.protobuf.Timestamp value) {
if (createTimeBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)
&& createTime_ != null
&& createTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
getCreateTimeBuilder().mergeFrom(value);
} else {
createTime_ = value;
}
} else {
createTimeBuilder_.mergeFrom(value);
}
if (createTime_ != null) {
bitField0_ |= 0x00000001;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Operation create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 1;</code>
*/
public Builder clearCreateTime() {
bitField0_ = (bitField0_ & ~0x00000001);
createTime_ = null;
if (createTimeBuilder_ != null) {
createTimeBuilder_.dispose();
createTimeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Operation create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 1;</code>
*/
public com.google.protobuf.Timestamp.Builder getCreateTimeBuilder() {
bitField0_ |= 0x00000001;
onChanged();
return getCreateTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Operation create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 1;</code>
*/
public com.google.protobuf.TimestampOrBuilder getCreateTimeOrBuilder() {
if (createTimeBuilder_ != null) {
return createTimeBuilder_.getMessageOrBuilder();
} else {
return createTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: createTime_;
}
}
/**
*
*
* <pre>
* Operation create time.
* </pre>
*
* <code>.google.protobuf.Timestamp create_time = 1;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getCreateTimeFieldBuilder() {
if (createTimeBuilder_ == null) {
createTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getCreateTime(), getParentForChildren(), isClean());
createTime_ = null;
}
return createTimeBuilder_;
}
private com.google.protobuf.Timestamp updateTime_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
updateTimeBuilder_;
/**
*
*
* <pre>
* Operation last update time. If the operation is done, this is also the
* finish time.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2;</code>
*
* @return Whether the updateTime field is set.
*/
public boolean hasUpdateTime() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Operation last update time. If the operation is done, this is also the
* finish time.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2;</code>
*
* @return The updateTime.
*/
public com.google.protobuf.Timestamp getUpdateTime() {
if (updateTimeBuilder_ == null) {
return updateTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: updateTime_;
} else {
return updateTimeBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Operation last update time. If the operation is done, this is also the
* finish time.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2;</code>
*/
public Builder setUpdateTime(com.google.protobuf.Timestamp value) {
if (updateTimeBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
updateTime_ = value;
} else {
updateTimeBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Operation last update time. If the operation is done, this is also the
* finish time.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2;</code>
*/
public Builder setUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) {
if (updateTimeBuilder_ == null) {
updateTime_ = builderForValue.build();
} else {
updateTimeBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Operation last update time. If the operation is done, this is also the
* finish time.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2;</code>
*/
public Builder mergeUpdateTime(com.google.protobuf.Timestamp value) {
if (updateTimeBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& updateTime_ != null
&& updateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) {
getUpdateTimeBuilder().mergeFrom(value);
} else {
updateTime_ = value;
}
} else {
updateTimeBuilder_.mergeFrom(value);
}
if (updateTime_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Operation last update time. If the operation is done, this is also the
* finish time.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2;</code>
*/
public Builder clearUpdateTime() {
bitField0_ = (bitField0_ & ~0x00000002);
updateTime_ = null;
if (updateTimeBuilder_ != null) {
updateTimeBuilder_.dispose();
updateTimeBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Operation last update time. If the operation is done, this is also the
* finish time.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2;</code>
*/
public com.google.protobuf.Timestamp.Builder getUpdateTimeBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getUpdateTimeFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Operation last update time. If the operation is done, this is also the
* finish time.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2;</code>
*/
public com.google.protobuf.TimestampOrBuilder getUpdateTimeOrBuilder() {
if (updateTimeBuilder_ != null) {
return updateTimeBuilder_.getMessageOrBuilder();
} else {
return updateTime_ == null
? com.google.protobuf.Timestamp.getDefaultInstance()
: updateTime_;
}
}
/**
*
*
* <pre>
* Operation last update time. If the operation is done, this is also the
* finish time.
* </pre>
*
* <code>.google.protobuf.Timestamp update_time = 2;</code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>
getUpdateTimeFieldBuilder() {
if (updateTimeBuilder_ == null) {
updateTimeBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.protobuf.Timestamp,
com.google.protobuf.Timestamp.Builder,
com.google.protobuf.TimestampOrBuilder>(
getUpdateTime(), getParentForChildren(), isClean());
updateTime_ = null;
}
return updateTimeBuilder_;
}
private long successCount_;
/**
*
*
* <pre>
* Count of entries that were processed successfully.
* </pre>
*
* <code>int64 success_count = 3;</code>
*
* @return The successCount.
*/
@java.lang.Override
public long getSuccessCount() {
return successCount_;
}
/**
*
*
* <pre>
* Count of entries that were processed successfully.
* </pre>
*
* <code>int64 success_count = 3;</code>
*
* @param value The successCount to set.
* @return This builder for chaining.
*/
public Builder setSuccessCount(long value) {
successCount_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
*
*
* <pre>
* Count of entries that were processed successfully.
* </pre>
*
* <code>int64 success_count = 3;</code>
*
* @return This builder for chaining.
*/
public Builder clearSuccessCount() {
bitField0_ = (bitField0_ & ~0x00000004);
successCount_ = 0L;
onChanged();
return this;
}
private long failureCount_;
/**
*
*
* <pre>
* Count of entries that encountered errors while processing.
* </pre>
*
* <code>int64 failure_count = 4;</code>
*
* @return The failureCount.
*/
@java.lang.Override
public long getFailureCount() {
return failureCount_;
}
/**
*
*
* <pre>
* Count of entries that encountered errors while processing.
* </pre>
*
* <code>int64 failure_count = 4;</code>
*
* @param value The failureCount to set.
* @return This builder for chaining.
*/
public Builder setFailureCount(long value) {
failureCount_ = value;
bitField0_ |= 0x00000008;
onChanged();
return this;
}
/**
*
*
* <pre>
* Count of entries that encountered errors while processing.
* </pre>
*
* <code>int64 failure_count = 4;</code>
*
* @return This builder for chaining.
*/
public Builder clearFailureCount() {
bitField0_ = (bitField0_ & ~0x00000008);
failureCount_ = 0L;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1.ImportUserEventsMetadata)
}
// @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1.ImportUserEventsMetadata)
private static final com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata();
}
public static com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ImportUserEventsMetadata> PARSER =
new com.google.protobuf.AbstractParser<ImportUserEventsMetadata>() {
@java.lang.Override
public ImportUserEventsMetadata parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ImportUserEventsMetadata> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ImportUserEventsMetadata> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.discoveryengine.v1.ImportUserEventsMetadata getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,960 | java-managedkafka/proto-google-cloud-managedkafka-v1/src/main/java/com/google/cloud/managedkafka/v1/ListConnectorsResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/managedkafka/v1/managed_kafka_connect.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.managedkafka.v1;
/**
*
*
* <pre>
* Response for ListConnectors.
* </pre>
*
* Protobuf type {@code google.cloud.managedkafka.v1.ListConnectorsResponse}
*/
// NOTE(review): machine-generated protobuf message class (protoc output for
// google.cloud.managedkafka.v1.ListConnectorsResponse). Do NOT hand-edit logic;
// regenerate from the .proto definition instead. Comments added below are
// review annotations only — all code is byte-identical to the generated form.
public final class ListConnectorsResponse extends com.google.protobuf.GeneratedMessageV3
    implements
    // @@protoc_insertion_point(message_implements:google.cloud.managedkafka.v1.ListConnectorsResponse)
    ListConnectorsResponseOrBuilder {
  private static final long serialVersionUID = 0L;
  // Use ListConnectorsResponse.newBuilder() to construct.
  private ListConnectorsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
    super(builder);
  }
  // Default instance: empty connectors list, empty page token.
  private ListConnectorsResponse() {
    connectors_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListConnectorsResponse();
  }
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.managedkafka.v1.ManagedKafkaConnectProto
        .internal_static_google_cloud_managedkafka_v1_ListConnectorsResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.managedkafka.v1.ManagedKafkaConnectProto
        .internal_static_google_cloud_managedkafka_v1_ListConnectorsResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.managedkafka.v1.ListConnectorsResponse.class,
            com.google.cloud.managedkafka.v1.ListConnectorsResponse.Builder.class);
  }
  public static final int CONNECTORS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.cloud.managedkafka.v1.Connector> connectors_;
  /**
   *
   *
   * <pre>
   * The list of connectors in the requested parent.
   * </pre>
   *
   * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
   */
  @java.lang.Override
  public java.util.List<com.google.cloud.managedkafka.v1.Connector> getConnectorsList() {
    return connectors_;
  }
  /**
   *
   *
   * <pre>
   * The list of connectors in the requested parent.
   * </pre>
   *
   * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.cloud.managedkafka.v1.ConnectorOrBuilder>
      getConnectorsOrBuilderList() {
    return connectors_;
  }
  /**
   *
   *
   * <pre>
   * The list of connectors in the requested parent.
   * </pre>
   *
   * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
   */
  @java.lang.Override
  public int getConnectorsCount() {
    return connectors_.size();
  }
  /**
   *
   *
   * <pre>
   * The list of connectors in the requested parent.
   * </pre>
   *
   * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.managedkafka.v1.Connector getConnectors(int index) {
    return connectors_.get(index);
  }
  /**
   *
   *
   * <pre>
   * The list of connectors in the requested parent.
   * </pre>
   *
   * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
   */
  @java.lang.Override
  public com.google.cloud.managedkafka.v1.ConnectorOrBuilder getConnectorsOrBuilder(int index) {
    return connectors_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; decoded lazily in the accessors below.
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token that can be sent as `page_token` to retrieve the next page of
   * results. If this field is omitted, there are no more results.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      // Lazily decode the wire-format ByteString and cache the String form.
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token that can be sent as `page_token` to retrieve the next page of
   * results. If this field is omitted, there are no more results.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // Memoized initialization state: -1 = not yet computed, 0 = false, 1 = true.
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < connectors_.size(); i++) {
      output.writeMessage(1, connectors_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < connectors_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, connectors_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.cloud.managedkafka.v1.ListConnectorsResponse)) {
      return super.equals(obj);
    }
    com.google.cloud.managedkafka.v1.ListConnectorsResponse other =
        (com.google.cloud.managedkafka.v1.ListConnectorsResponse) obj;
    if (!getConnectorsList().equals(other.getConnectorsList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getConnectorsCount() > 0) {
      hash = (37 * hash) + CONNECTORS_FIELD_NUMBER;
      hash = (53 * hash) + getConnectorsList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse parseFrom(
      java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse parseFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(
      com.google.cloud.managedkafka.v1.ListConnectorsResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
  /**
   *
   *
   * <pre>
   * Response for ListConnectors.
   * </pre>
   *
   * Protobuf type {@code google.cloud.managedkafka.v1.ListConnectorsResponse}
   */
  public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
      implements
      // @@protoc_insertion_point(builder_implements:google.cloud.managedkafka.v1.ListConnectorsResponse)
      com.google.cloud.managedkafka.v1.ListConnectorsResponseOrBuilder {
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.cloud.managedkafka.v1.ManagedKafkaConnectProto
          .internal_static_google_cloud_managedkafka_v1_ListConnectorsResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.cloud.managedkafka.v1.ManagedKafkaConnectProto
          .internal_static_google_cloud_managedkafka_v1_ListConnectorsResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.cloud.managedkafka.v1.ListConnectorsResponse.class,
              com.google.cloud.managedkafka.v1.ListConnectorsResponse.Builder.class);
    }
    // Construct using com.google.cloud.managedkafka.v1.ListConnectorsResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (connectorsBuilder_ == null) {
        connectors_ = java.util.Collections.emptyList();
      } else {
        connectors_ = null;
        connectorsBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.cloud.managedkafka.v1.ManagedKafkaConnectProto
          .internal_static_google_cloud_managedkafka_v1_ListConnectorsResponse_descriptor;
    }
    @java.lang.Override
    public com.google.cloud.managedkafka.v1.ListConnectorsResponse getDefaultInstanceForType() {
      return com.google.cloud.managedkafka.v1.ListConnectorsResponse.getDefaultInstance();
    }
    @java.lang.Override
    public com.google.cloud.managedkafka.v1.ListConnectorsResponse build() {
      com.google.cloud.managedkafka.v1.ListConnectorsResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.cloud.managedkafka.v1.ListConnectorsResponse buildPartial() {
      com.google.cloud.managedkafka.v1.ListConnectorsResponse result =
          new com.google.cloud.managedkafka.v1.ListConnectorsResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    private void buildPartialRepeatedFields(
        com.google.cloud.managedkafka.v1.ListConnectorsResponse result) {
      if (connectorsBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          connectors_ = java.util.Collections.unmodifiableList(connectors_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.connectors_ = connectors_;
      } else {
        result.connectors_ = connectorsBuilder_.build();
      }
    }
    private void buildPartial0(com.google.cloud.managedkafka.v1.ListConnectorsResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
    @java.lang.Override
    public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
      return super.clearOneof(oneof);
    }
    @java.lang.Override
    public Builder setRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
      return super.setRepeatedField(field, index, value);
    }
    @java.lang.Override
    public Builder addRepeatedField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.addRepeatedField(field, value);
    }
    @java.lang.Override
    public Builder mergeFrom(com.google.protobuf.Message other) {
      if (other instanceof com.google.cloud.managedkafka.v1.ListConnectorsResponse) {
        return mergeFrom((com.google.cloud.managedkafka.v1.ListConnectorsResponse) other);
      } else {
        super.mergeFrom(other);
        return this;
      }
    }
    public Builder mergeFrom(com.google.cloud.managedkafka.v1.ListConnectorsResponse other) {
      if (other == com.google.cloud.managedkafka.v1.ListConnectorsResponse.getDefaultInstance())
        return this;
      if (connectorsBuilder_ == null) {
        if (!other.connectors_.isEmpty()) {
          if (connectors_.isEmpty()) {
            connectors_ = other.connectors_;
            bitField0_ = (bitField0_ & ~0x00000001);
          } else {
            ensureConnectorsIsMutable();
            connectors_.addAll(other.connectors_);
          }
          onChanged();
        }
      } else {
        if (!other.connectors_.isEmpty()) {
          if (connectorsBuilder_.isEmpty()) {
            connectorsBuilder_.dispose();
            connectorsBuilder_ = null;
            connectors_ = other.connectors_;
            bitField0_ = (bitField0_ & ~0x00000001);
            connectorsBuilder_ =
                com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
                    ? getConnectorsFieldBuilder()
                    : null;
          } else {
            connectorsBuilder_.addAllMessages(other.connectors_);
          }
        }
      }
      if (!other.getNextPageToken().isEmpty()) {
        nextPageToken_ = other.nextPageToken_;
        bitField0_ |= 0x00000002;
        onChanged();
      }
      this.mergeUnknownFields(other.getUnknownFields());
      onChanged();
      return this;
    }
    @java.lang.Override
    public final boolean isInitialized() {
      return true;
    }
    @java.lang.Override
    public Builder mergeFrom(
        com.google.protobuf.CodedInputStream input,
        com.google.protobuf.ExtensionRegistryLite extensionRegistry)
        throws java.io.IOException {
      if (extensionRegistry == null) {
        throw new java.lang.NullPointerException();
      }
      try {
        boolean done = false;
        while (!done) {
          int tag = input.readTag();
          switch (tag) {
            case 0:
              done = true;
              break;
            case 10:
              {
                com.google.cloud.managedkafka.v1.Connector m =
                    input.readMessage(
                        com.google.cloud.managedkafka.v1.Connector.parser(), extensionRegistry);
                if (connectorsBuilder_ == null) {
                  ensureConnectorsIsMutable();
                  connectors_.add(m);
                } else {
                  connectorsBuilder_.addMessage(m);
                }
                break;
              } // case 10
            case 18:
              {
                nextPageToken_ = input.readStringRequireUtf8();
                bitField0_ |= 0x00000002;
                break;
              } // case 18
            default:
              {
                if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                  done = true; // was an endgroup tag
                }
                break;
              } // default:
          } // switch (tag)
        } // while (!done)
      } catch (com.google.protobuf.InvalidProtocolBufferException e) {
        throw e.unwrapIOException();
      } finally {
        onChanged();
      } // finally
      return this;
    }
    // Field-presence bits: bit 0 = connectors list is a private mutable copy,
    // bit 1 = nextPageToken has been explicitly set.
    private int bitField0_;
    private java.util.List<com.google.cloud.managedkafka.v1.Connector> connectors_ =
        java.util.Collections.emptyList();
    private void ensureConnectorsIsMutable() {
      if (!((bitField0_ & 0x00000001) != 0)) {
        connectors_ =
            new java.util.ArrayList<com.google.cloud.managedkafka.v1.Connector>(connectors_);
        bitField0_ |= 0x00000001;
      }
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.managedkafka.v1.Connector,
            com.google.cloud.managedkafka.v1.Connector.Builder,
            com.google.cloud.managedkafka.v1.ConnectorOrBuilder>
        connectorsBuilder_;
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public java.util.List<com.google.cloud.managedkafka.v1.Connector> getConnectorsList() {
      if (connectorsBuilder_ == null) {
        return java.util.Collections.unmodifiableList(connectors_);
      } else {
        return connectorsBuilder_.getMessageList();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public int getConnectorsCount() {
      if (connectorsBuilder_ == null) {
        return connectors_.size();
      } else {
        return connectorsBuilder_.getCount();
      }
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public com.google.cloud.managedkafka.v1.Connector getConnectors(int index) {
      if (connectorsBuilder_ == null) {
        return connectors_.get(index);
      } else {
        return connectorsBuilder_.getMessage(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public Builder setConnectors(int index, com.google.cloud.managedkafka.v1.Connector value) {
      if (connectorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureConnectorsIsMutable();
        connectors_.set(index, value);
        onChanged();
      } else {
        connectorsBuilder_.setMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public Builder setConnectors(
        int index, com.google.cloud.managedkafka.v1.Connector.Builder builderForValue) {
      if (connectorsBuilder_ == null) {
        ensureConnectorsIsMutable();
        connectors_.set(index, builderForValue.build());
        onChanged();
      } else {
        connectorsBuilder_.setMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public Builder addConnectors(com.google.cloud.managedkafka.v1.Connector value) {
      if (connectorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureConnectorsIsMutable();
        connectors_.add(value);
        onChanged();
      } else {
        connectorsBuilder_.addMessage(value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public Builder addConnectors(int index, com.google.cloud.managedkafka.v1.Connector value) {
      if (connectorsBuilder_ == null) {
        if (value == null) {
          throw new NullPointerException();
        }
        ensureConnectorsIsMutable();
        connectors_.add(index, value);
        onChanged();
      } else {
        connectorsBuilder_.addMessage(index, value);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public Builder addConnectors(
        com.google.cloud.managedkafka.v1.Connector.Builder builderForValue) {
      if (connectorsBuilder_ == null) {
        ensureConnectorsIsMutable();
        connectors_.add(builderForValue.build());
        onChanged();
      } else {
        connectorsBuilder_.addMessage(builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public Builder addConnectors(
        int index, com.google.cloud.managedkafka.v1.Connector.Builder builderForValue) {
      if (connectorsBuilder_ == null) {
        ensureConnectorsIsMutable();
        connectors_.add(index, builderForValue.build());
        onChanged();
      } else {
        connectorsBuilder_.addMessage(index, builderForValue.build());
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public Builder addAllConnectors(
        java.lang.Iterable<? extends com.google.cloud.managedkafka.v1.Connector> values) {
      if (connectorsBuilder_ == null) {
        ensureConnectorsIsMutable();
        com.google.protobuf.AbstractMessageLite.Builder.addAll(values, connectors_);
        onChanged();
      } else {
        connectorsBuilder_.addAllMessages(values);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public Builder clearConnectors() {
      if (connectorsBuilder_ == null) {
        connectors_ = java.util.Collections.emptyList();
        bitField0_ = (bitField0_ & ~0x00000001);
        onChanged();
      } else {
        connectorsBuilder_.clear();
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public Builder removeConnectors(int index) {
      if (connectorsBuilder_ == null) {
        ensureConnectorsIsMutable();
        connectors_.remove(index);
        onChanged();
      } else {
        connectorsBuilder_.remove(index);
      }
      return this;
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public com.google.cloud.managedkafka.v1.Connector.Builder getConnectorsBuilder(int index) {
      return getConnectorsFieldBuilder().getBuilder(index);
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public com.google.cloud.managedkafka.v1.ConnectorOrBuilder getConnectorsOrBuilder(int index) {
      if (connectorsBuilder_ == null) {
        return connectors_.get(index);
      } else {
        return connectorsBuilder_.getMessageOrBuilder(index);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public java.util.List<? extends com.google.cloud.managedkafka.v1.ConnectorOrBuilder>
        getConnectorsOrBuilderList() {
      if (connectorsBuilder_ != null) {
        return connectorsBuilder_.getMessageOrBuilderList();
      } else {
        return java.util.Collections.unmodifiableList(connectors_);
      }
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public com.google.cloud.managedkafka.v1.Connector.Builder addConnectorsBuilder() {
      return getConnectorsFieldBuilder()
          .addBuilder(com.google.cloud.managedkafka.v1.Connector.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public com.google.cloud.managedkafka.v1.Connector.Builder addConnectorsBuilder(int index) {
      return getConnectorsFieldBuilder()
          .addBuilder(index, com.google.cloud.managedkafka.v1.Connector.getDefaultInstance());
    }
    /**
     *
     *
     * <pre>
     * The list of connectors in the requested parent.
     * </pre>
     *
     * <code>repeated .google.cloud.managedkafka.v1.Connector connectors = 1;</code>
     */
    public java.util.List<com.google.cloud.managedkafka.v1.Connector.Builder>
        getConnectorsBuilderList() {
      return getConnectorsFieldBuilder().getBuilderList();
    }
    private com.google.protobuf.RepeatedFieldBuilderV3<
            com.google.cloud.managedkafka.v1.Connector,
            com.google.cloud.managedkafka.v1.Connector.Builder,
            com.google.cloud.managedkafka.v1.ConnectorOrBuilder>
        getConnectorsFieldBuilder() {
      if (connectorsBuilder_ == null) {
        connectorsBuilder_ =
            new com.google.protobuf.RepeatedFieldBuilderV3<
                com.google.cloud.managedkafka.v1.Connector,
                com.google.cloud.managedkafka.v1.Connector.Builder,
                com.google.cloud.managedkafka.v1.ConnectorOrBuilder>(
                connectors_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
        connectors_ = null;
      }
      return connectorsBuilder_;
    }
    private java.lang.Object nextPageToken_ = "";
    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page of
     * results. If this field is omitted, there are no more results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The nextPageToken.
     */
    public java.lang.String getNextPageToken() {
      java.lang.Object ref = nextPageToken_;
      if (!(ref instanceof java.lang.String)) {
        com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
        java.lang.String s = bs.toStringUtf8();
        nextPageToken_ = s;
        return s;
      } else {
        return (java.lang.String) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page of
     * results. If this field is omitted, there are no more results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return The bytes for nextPageToken.
     */
    public com.google.protobuf.ByteString getNextPageTokenBytes() {
      java.lang.Object ref = nextPageToken_;
      if (ref instanceof String) {
        com.google.protobuf.ByteString b =
            com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
        nextPageToken_ = b;
        return b;
      } else {
        return (com.google.protobuf.ByteString) ref;
      }
    }
    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page of
     * results. If this field is omitted, there are no more results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageToken(java.lang.String value) {
      if (value == null) {
        throw new NullPointerException();
      }
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page of
     * results. If this field is omitted, there are no more results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @return This builder for chaining.
     */
    public Builder clearNextPageToken() {
      nextPageToken_ = getDefaultInstance().getNextPageToken();
      bitField0_ = (bitField0_ & ~0x00000002);
      onChanged();
      return this;
    }
    /**
     *
     *
     * <pre>
     * A token that can be sent as `page_token` to retrieve the next page of
     * results. If this field is omitted, there are no more results.
     * </pre>
     *
     * <code>string next_page_token = 2;</code>
     *
     * @param value The bytes for nextPageToken to set.
     * @return This builder for chaining.
     */
    public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
      if (value == null) {
        throw new NullPointerException();
      }
      checkByteStringIsUtf8(value);
      nextPageToken_ = value;
      bitField0_ |= 0x00000002;
      onChanged();
      return this;
    }
    @java.lang.Override
    public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.setUnknownFields(unknownFields);
    }
    @java.lang.Override
    public final Builder mergeUnknownFields(
        final com.google.protobuf.UnknownFieldSet unknownFields) {
      return super.mergeUnknownFields(unknownFields);
    }
    // @@protoc_insertion_point(builder_scope:google.cloud.managedkafka.v1.ListConnectorsResponse)
  }
  // @@protoc_insertion_point(class_scope:google.cloud.managedkafka.v1.ListConnectorsResponse)
  private static final com.google.cloud.managedkafka.v1.ListConnectorsResponse DEFAULT_INSTANCE;
  static {
    DEFAULT_INSTANCE = new com.google.cloud.managedkafka.v1.ListConnectorsResponse();
  }
  public static com.google.cloud.managedkafka.v1.ListConnectorsResponse getDefaultInstance() {
    return DEFAULT_INSTANCE;
  }
  // Parser delegates to Builder.mergeFrom so partial messages are preserved on error.
  private static final com.google.protobuf.Parser<ListConnectorsResponse> PARSER =
      new com.google.protobuf.AbstractParser<ListConnectorsResponse>() {
        @java.lang.Override
        public ListConnectorsResponse parsePartialFrom(
            com.google.protobuf.CodedInputStream input,
            com.google.protobuf.ExtensionRegistryLite extensionRegistry)
            throws com.google.protobuf.InvalidProtocolBufferException {
          Builder builder = newBuilder();
          try {
            builder.mergeFrom(input, extensionRegistry);
          } catch (com.google.protobuf.InvalidProtocolBufferException e) {
            throw e.setUnfinishedMessage(builder.buildPartial());
          } catch (com.google.protobuf.UninitializedMessageException e) {
            throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
          } catch (java.io.IOException e) {
            throw new com.google.protobuf.InvalidProtocolBufferException(e)
                .setUnfinishedMessage(builder.buildPartial());
          }
          return builder.buildPartial();
        }
      };
  public static com.google.protobuf.Parser<ListConnectorsResponse> parser() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.protobuf.Parser<ListConnectorsResponse> getParserForType() {
    return PARSER;
  }
  @java.lang.Override
  public com.google.cloud.managedkafka.v1.ListConnectorsResponse getDefaultInstanceForType() {
    return DEFAULT_INSTANCE;
  }
}
|
apache/httpcomponents-client | 37,207 | httpclient5/src/main/java/org/apache/hc/client5/http/impl/io/PoolingHttpClientConnectionManager.java | /*
* ====================================================================
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing,
* software distributed under the License is distributed on an
* "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
* KIND, either express or implied. See the License for the
* specific language governing permissions and limitations
* under the License.
* ====================================================================
*
* This software consists of voluntary contributions made by many
* individuals on behalf of the Apache Software Foundation. For more
* information on the Apache Software Foundation, please see
* <http://www.apache.org/>.
*
*/
package org.apache.hc.client5.http.impl.io;
import java.io.IOException;
import java.nio.file.Path;
import java.util.Set;
import java.util.concurrent.ExecutionException;
import java.util.concurrent.Future;
import java.util.concurrent.TimeoutException;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicReference;
import java.util.concurrent.locks.ReentrantLock;
import org.apache.hc.client5.http.DnsResolver;
import org.apache.hc.client5.http.EndpointInfo;
import org.apache.hc.client5.http.HttpRoute;
import org.apache.hc.client5.http.SchemePortResolver;
import org.apache.hc.client5.http.config.ConnectionConfig;
import org.apache.hc.client5.http.config.TlsConfig;
import org.apache.hc.client5.http.impl.ConnPoolSupport;
import org.apache.hc.client5.http.impl.ConnectionHolder;
import org.apache.hc.client5.http.impl.ConnectionShutdownException;
import org.apache.hc.client5.http.impl.PrefixedIncrementingId;
import org.apache.hc.client5.http.io.ConnectionEndpoint;
import org.apache.hc.client5.http.io.HttpClientConnectionManager;
import org.apache.hc.client5.http.io.HttpClientConnectionOperator;
import org.apache.hc.client5.http.io.LeaseRequest;
import org.apache.hc.client5.http.io.ManagedHttpClientConnection;
import org.apache.hc.client5.http.ssl.DefaultClientTlsStrategy;
import org.apache.hc.client5.http.ssl.TlsSocketStrategy;
import org.apache.hc.core5.annotation.Contract;
import org.apache.hc.core5.annotation.Internal;
import org.apache.hc.core5.annotation.ThreadingBehavior;
import org.apache.hc.core5.function.Resolver;
import org.apache.hc.core5.http.ClassicHttpRequest;
import org.apache.hc.core5.http.ClassicHttpResponse;
import org.apache.hc.core5.http.HttpConnection;
import org.apache.hc.core5.http.HttpException;
import org.apache.hc.core5.http.HttpHost;
import org.apache.hc.core5.http.URIScheme;
import org.apache.hc.core5.http.config.Registry;
import org.apache.hc.core5.http.config.RegistryBuilder;
import org.apache.hc.core5.http.impl.io.HttpRequestExecutor;
import org.apache.hc.core5.http.io.HttpConnectionFactory;
import org.apache.hc.core5.http.io.SocketConfig;
import org.apache.hc.core5.http.protocol.HttpContext;
import org.apache.hc.core5.io.CloseMode;
import org.apache.hc.core5.pool.ConnPoolControl;
import org.apache.hc.core5.pool.DefaultDisposalCallback;
import org.apache.hc.core5.pool.DisposalCallback;
import org.apache.hc.core5.pool.LaxConnPool;
import org.apache.hc.core5.pool.ManagedConnPool;
import org.apache.hc.core5.pool.PoolConcurrencyPolicy;
import org.apache.hc.core5.pool.PoolEntry;
import org.apache.hc.core5.pool.PoolReusePolicy;
import org.apache.hc.core5.pool.PoolStats;
import org.apache.hc.core5.pool.StrictConnPool;
import org.apache.hc.core5.util.Args;
import org.apache.hc.core5.util.Deadline;
import org.apache.hc.core5.util.Identifiable;
import org.apache.hc.core5.util.TimeValue;
import org.apache.hc.core5.util.Timeout;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;
/**
* {@code PoolingHttpClientConnectionManager} maintains a pool of
* {@link ManagedHttpClientConnection}s and is able to service connection requests
* from multiple execution threads. Connections are pooled on a per route
 * basis. A request for a route for which the manager already has a persistent
 * connection available in the pool will be serviced by leasing
 * a connection from the pool rather than creating a new connection.
* <p>
* {@code PoolingHttpClientConnectionManager} maintains a maximum limit of connections
* per route and in total. Connection limits, however, can be adjusted
* using {@link ConnPoolControl} methods.
* <p>
* Total time to live (TTL) set at construction time defines maximum life span
* of persistent connections regardless of their expiration setting. No persistent
* connection will be re-used past its TTL value.
*
* @since 4.3
*/
@Contract(threading = ThreadingBehavior.SAFE_CONDITIONAL)
public class PoolingHttpClientConnectionManager
    implements HttpClientConnectionManager, ConnPoolControl<HttpRoute> {
    private static final Logger LOG = LoggerFactory.getLogger(PoolingHttpClientConnectionManager.class);
    // Disposal callback used to close discarded connections synchronously (the stock behavior).
    // Null only when the protected pool-injecting constructor was used.
    private final DisposalCallback<ManagedHttpClientConnection> defaultDisposal;
    // Optional wrapper around defaultDisposal that defers the actual close so it can be
    // performed outside the pool's internal lock; drained at well-defined points via
    // drainDisposals(). Non-null only when off-lock disposal was enabled at construction.
    // NOTE(review): deferral semantics inferred from the drain() call sites below -
    // confirm against OffLockDisposalCallback.
    private final OffLockDisposalCallback<ManagedHttpClientConnection> offLockDisposer;
    public static final int DEFAULT_MAX_TOTAL_CONNECTIONS = 25;
    public static final int DEFAULT_MAX_CONNECTIONS_PER_ROUTE = 5;
    // Performs the actual socket connect / TLS upgrade for managed connections.
    private final HttpClientConnectionOperator connectionOperator;
    // Underlying per-route connection pool (strict or lax, chosen at construction time).
    private final ManagedConnPool<HttpRoute, ManagedHttpClientConnection> pool;
    // Factory for brand-new ManagedHttpClientConnection instances assigned to pool entries.
    private final HttpConnectionFactory<ManagedHttpClientConnection> connFactory;
    // Flipped exactly once by close(); guards against repeated shutdown work.
    private final AtomicBoolean closed;
    // Per-route / per-host configuration hooks. Volatile: the setters below may swap them
    // at runtime while leases are in flight; each use reads the field once into a local.
    private volatile Resolver<HttpRoute, SocketConfig> socketConfigResolver;
    private volatile Resolver<HttpRoute, ConnectionConfig> connectionConfigResolver;
    private volatile Resolver<HttpHost, TlsConfig> tlsConfigResolver;
    /**
     * Creates a manager with default settings: default plain/TLS connection operator,
     * {@link PoolConcurrencyPolicy#STRICT} pool, LIFO reuse and no total time-to-live.
     */
    public PoolingHttpClientConnectionManager() {
        this(new DefaultHttpClientConnectionOperator(null, null,
                RegistryBuilder.<TlsSocketStrategy>create()
                        .register(URIScheme.HTTPS.id, DefaultClientTlsStrategy.createDefault())
                        .build()),
                PoolConcurrencyPolicy.STRICT,
                PoolReusePolicy.LIFO,
                TimeValue.NEG_ONE_MILLISECOND,
                null);
    }
    /**
     * @deprecated Use {@link PoolingHttpClientConnectionManagerBuilder}
     */
    @Deprecated
    public PoolingHttpClientConnectionManager(
            final Registry<org.apache.hc.client5.http.socket.ConnectionSocketFactory> socketFactoryRegistry) {
        this(socketFactoryRegistry, null);
    }
    /**
     * @deprecated Use {@link PoolingHttpClientConnectionManagerBuilder}
     */
    @Deprecated
    public PoolingHttpClientConnectionManager(
            final Registry<org.apache.hc.client5.http.socket.ConnectionSocketFactory> socketFactoryRegistry,
            final HttpConnectionFactory<ManagedHttpClientConnection> connFactory) {
        this(socketFactoryRegistry, PoolConcurrencyPolicy.STRICT, TimeValue.NEG_ONE_MILLISECOND, connFactory);
    }
    /**
     * @deprecated Use {@link PoolingHttpClientConnectionManagerBuilder}
     */
    @Deprecated
    public PoolingHttpClientConnectionManager(
            final Registry<org.apache.hc.client5.http.socket.ConnectionSocketFactory> socketFactoryRegistry,
            final PoolConcurrencyPolicy poolConcurrencyPolicy,
            final TimeValue timeToLive,
            final HttpConnectionFactory<ManagedHttpClientConnection> connFactory) {
        this(socketFactoryRegistry, poolConcurrencyPolicy, PoolReusePolicy.LIFO, timeToLive, connFactory);
    }
    /**
     * @deprecated Use {@link PoolingHttpClientConnectionManagerBuilder}
     */
    @Deprecated
    public PoolingHttpClientConnectionManager(
            final Registry<org.apache.hc.client5.http.socket.ConnectionSocketFactory> socketFactoryRegistry,
            final PoolConcurrencyPolicy poolConcurrencyPolicy,
            final PoolReusePolicy poolReusePolicy,
            final TimeValue timeToLive) {
        this(socketFactoryRegistry, poolConcurrencyPolicy, poolReusePolicy, timeToLive, null);
    }
    /**
     * @deprecated Use {@link PoolingHttpClientConnectionManagerBuilder}
     */
    @Deprecated
    public PoolingHttpClientConnectionManager(
            final Registry<org.apache.hc.client5.http.socket.ConnectionSocketFactory> socketFactoryRegistry,
            final PoolConcurrencyPolicy poolConcurrencyPolicy,
            final PoolReusePolicy poolReusePolicy,
            final TimeValue timeToLive,
            final HttpConnectionFactory<ManagedHttpClientConnection> connFactory) {
        this(socketFactoryRegistry, poolConcurrencyPolicy, poolReusePolicy, timeToLive, null, null, connFactory);
    }
    /**
     * @deprecated Use {@link PoolingHttpClientConnectionManagerBuilder}
     */
    @Deprecated
    public PoolingHttpClientConnectionManager(
            final Registry<org.apache.hc.client5.http.socket.ConnectionSocketFactory> socketFactoryRegistry,
            final PoolConcurrencyPolicy poolConcurrencyPolicy,
            final PoolReusePolicy poolReusePolicy,
            final TimeValue timeToLive,
            final SchemePortResolver schemePortResolver,
            final DnsResolver dnsResolver,
            final HttpConnectionFactory<ManagedHttpClientConnection> connFactory) {
        this(new DefaultHttpClientConnectionOperator(socketFactoryRegistry, schemePortResolver, dnsResolver),
                poolConcurrencyPolicy,
                poolReusePolicy,
                timeToLive,
                connFactory);
    }
    /**
     * Convenience overload of the primary constructor with off-lock disposal disabled.
     */
    @Internal
    public PoolingHttpClientConnectionManager(
            final HttpClientConnectionOperator httpClientConnectionOperator,
            final PoolConcurrencyPolicy poolConcurrencyPolicy,
            final PoolReusePolicy poolReusePolicy,
            final TimeValue timeToLive,
            final HttpConnectionFactory<ManagedHttpClientConnection> connFactory) {
        this(httpClientConnectionOperator,poolConcurrencyPolicy,poolReusePolicy,timeToLive,connFactory,false);
    }
    /**
     * Primary constructor. Builds a strict or lax pool depending on
     * {@code poolConcurrencyPolicy} (null defaults to STRICT) and optionally wraps the
     * default disposal callback so connection close happens outside the pool lock.
     */
    @Internal
    public PoolingHttpClientConnectionManager(
            final HttpClientConnectionOperator httpClientConnectionOperator,
            final PoolConcurrencyPolicy poolConcurrencyPolicy,
            final PoolReusePolicy poolReusePolicy,
            final TimeValue timeToLive,
            final HttpConnectionFactory<ManagedHttpClientConnection> connFactory,
            final boolean offLockDisposalEnabled) {
        super();
        this.connectionOperator = Args.notNull(httpClientConnectionOperator, "Connection operator");
        this.defaultDisposal = new DefaultDisposalCallback<>();
        this.offLockDisposer = offLockDisposalEnabled ? new OffLockDisposalCallback<>(this.defaultDisposal) : null;
        final DisposalCallback<ManagedHttpClientConnection> callbackForPool = offLockDisposalEnabled ? this.offLockDisposer : this.defaultDisposal;
        switch (poolConcurrencyPolicy != null ? poolConcurrencyPolicy : PoolConcurrencyPolicy.STRICT) {
            case STRICT:
                // The anonymous subclass reroutes closeExpired() through closeIfExpired(entry)
                // so that per-route ConnectionConfig time-to-live is honored in addition to
                // the entry's own expiry deadline.
                this.pool = new StrictConnPool<HttpRoute, ManagedHttpClientConnection>(
                        DEFAULT_MAX_CONNECTIONS_PER_ROUTE,
                        DEFAULT_MAX_TOTAL_CONNECTIONS,
                        timeToLive,
                        poolReusePolicy,
                        callbackForPool,
                        null) {
                    @Override
                    public void closeExpired() {
                        enumAvailable(e -> closeIfExpired(e));
                    }
                };
                break;
            case LAX:
                // Same closeExpired() override as the STRICT branch; LAX pools have no
                // global total-connection cap, only a per-route limit.
                this.pool = new LaxConnPool<HttpRoute, ManagedHttpClientConnection>(
                        DEFAULT_MAX_CONNECTIONS_PER_ROUTE,
                        timeToLive,
                        poolReusePolicy,
                        callbackForPool,
                        null) {
                    @Override
                    public void closeExpired() {
                        enumAvailable(e -> closeIfExpired(e));
                    }
                };
                break;
            default:
                throw new IllegalArgumentException("Unexpected PoolConcurrencyPolicy value: " + poolConcurrencyPolicy);
        }
        this.connFactory = connFactory != null ? connFactory : ManagedHttpClientConnectionFactory.INSTANCE;
        this.closed = new AtomicBoolean(false);
    }
    /**
     * Constructor for callers that supply a fully built pool. Off-lock disposal is not
     * available in this configuration (both disposal fields stay null, so
     * drainDisposals() is a no-op).
     */
    @Internal
    protected PoolingHttpClientConnectionManager(
            final HttpClientConnectionOperator httpClientConnectionOperator,
            final ManagedConnPool<HttpRoute, ManagedHttpClientConnection> pool,
            final HttpConnectionFactory<ManagedHttpClientConnection> connFactory) {
        super();
        this.connectionOperator = Args.notNull(httpClientConnectionOperator, "Connection operator");
        this.pool = Args.notNull(pool, "Connection pool");
        this.connFactory = connFactory != null ? connFactory : ManagedHttpClientConnectionFactory.INSTANCE;
        this.closed = new AtomicBoolean(false);
        this.defaultDisposal = null;
        this.offLockDisposer = null;
    }
    @Override
    public void close() {
        close(CloseMode.GRACEFUL);
    }
    /**
     * Shuts the pool down exactly once, then drains any disposals deferred by the
     * off-lock callback so no connection close is left pending.
     */
    @Override
    public void close(final CloseMode closeMode) {
        if (this.closed.compareAndSet(false, true)) {
            if (LOG.isDebugEnabled()) {
                LOG.debug("Shutdown connection pool {}", closeMode);
            }
            this.pool.close(closeMode);
            drainDisposals();
            LOG.debug("Connection pool shut down");
        }
    }
    // Downcasts an endpoint handed back by a caller; anything other than our own
    // InternalConnectionEndpoint indicates a programming error.
    private InternalConnectionEndpoint cast(final ConnectionEndpoint endpoint) {
        if (endpoint instanceof InternalConnectionEndpoint) {
            return (InternalConnectionEndpoint) endpoint;
        }
        throw new IllegalStateException("Unexpected endpoint class: " + endpoint.getClass());
    }
    // Resolver lookups below read the volatile field once and fall back to the
    // library defaults when no resolver is set or it returns null.
    private SocketConfig resolveSocketConfig(final HttpRoute route) {
        final Resolver<HttpRoute, SocketConfig> resolver = this.socketConfigResolver;
        final SocketConfig socketConfig = resolver != null ? resolver.resolve(route) : null;
        return socketConfig != null ? socketConfig : SocketConfig.DEFAULT;
    }
    private ConnectionConfig resolveConnectionConfig(final HttpRoute route) {
        final Resolver<HttpRoute, ConnectionConfig> resolver = this.connectionConfigResolver;
        final ConnectionConfig connectionConfig = resolver != null ? resolver.resolve(route) : null;
        return connectionConfig != null ? connectionConfig : ConnectionConfig.DEFAULT;
    }
    private TlsConfig resolveTlsConfig(final HttpHost host) {
        final Resolver<HttpHost, TlsConfig> resolver = this.tlsConfigResolver;
        final TlsConfig tlsConfig = resolver != null ? resolver.resolve(host) : null;
        return tlsConfig != null ? tlsConfig : TlsConfig.DEFAULT;
    }
    // Default re-validation window of 2s applies when the connection config does not set one.
    private TimeValue resolveValidateAfterInactivity(final ConnectionConfig connectionConfig) {
        final TimeValue timeValue = connectionConfig.getValidateAfterInactivity();
        return timeValue != null ? timeValue : TimeValue.ofSeconds(2);
    }
    /**
     * Leases an endpoint with no request timeout (waits indefinitely for a pool slot).
     */
    public LeaseRequest lease(final String id, final HttpRoute route, final Object state) {
        return lease(id, route, Timeout.DISABLED, state);
    }
    @Override
    public LeaseRequest lease(
            final String id,
            final HttpRoute route,
            final Timeout requestTimeout,
            final Object state) {
        Args.notNull(route, "HTTP route");
        if (LOG.isDebugEnabled()) {
            LOG.debug("{} endpoint lease request ({}) {}", id, requestTimeout, ConnPoolSupport.formatStats(route, state, pool));
        }
        final Future<PoolEntry<HttpRoute, ManagedHttpClientConnection>> leaseFuture = this.pool.lease(route, state, requestTimeout, null);
        return new LeaseRequest() {
            // Using a ReentrantLock specific to each LeaseRequest instance to maintain the original
            // synchronization semantics. This ensures that each LeaseRequest has its own unique lock.
            private final ReentrantLock lock = new ReentrantLock();
            private volatile ConnectionEndpoint endpoint;
            @Override
            public ConnectionEndpoint get(
                    final Timeout timeout) throws InterruptedException, ExecutionException, TimeoutException {
                lock.lock();
                try {
                    Args.notNull(timeout, "Operation timeout");
                    // Idempotent: a second get() returns the endpoint created by the first.
                    if (this.endpoint != null) {
                        return this.endpoint;
                    }
                    final PoolEntry<HttpRoute, ManagedHttpClientConnection> poolEntry;
                    try {
                        poolEntry = leaseFuture.get(timeout.getDuration(), timeout.getTimeUnit());
                    } catch (final TimeoutException ex) {
                        // Make sure the pending pool lease does not linger after we give up.
                        leaseFuture.cancel(true);
                        throw ex;
                    }
                    if (LOG.isDebugEnabled()) {
                        LOG.debug("{} endpoint leased {}", id, ConnPoolSupport.formatStats(route, state, pool));
                    }
                    final ConnectionConfig connectionConfig = resolveConnectionConfig(route);
                    try {
                        // Check 1: discard a pooled connection that has outlived its
                        // configured total time-to-live.
                        if (poolEntry.hasConnection()) {
                            final TimeValue timeToLive = connectionConfig.getTimeToLive();
                            if (TimeValue.isNonNegative(timeToLive)) {
                                if (timeToLive.getDuration() == 0
                                        || Deadline.calculate(poolEntry.getCreated(), timeToLive).isExpired()) {
                                    poolEntry.discardConnection(CloseMode.GRACEFUL);
                                }
                            }
                        }
                        // Check 2: discard a connection idle longer than the idle timeout.
                        if (poolEntry.hasConnection()) {
                            final TimeValue idleTimeout = connectionConfig.getIdleTimeout();
                            if (TimeValue.isPositive(idleTimeout)) {
                                if (Deadline.calculate(poolEntry.getUpdated(), idleTimeout).isExpired()) {
                                    poolEntry.discardConnection(CloseMode.GRACEFUL);
                                }
                            }
                        }
                        // Check 3: after sufficient inactivity, probe for staleness
                        // (half-closed sockets) and discard immediately if stale.
                        if (poolEntry.hasConnection()) {
                            final TimeValue timeValue = resolveValidateAfterInactivity(connectionConfig);
                            if (TimeValue.isNonNegative(timeValue)) {
                                if (timeValue.getDuration() == 0
                                        || Deadline.calculate(poolEntry.getUpdated(), timeValue).isExpired()) {
                                    final ManagedHttpClientConnection conn = poolEntry.getConnection();
                                    boolean stale;
                                    try {
                                        stale = conn.isStale();
                                    } catch (final IOException ignore) {
                                        // A probe failure is treated the same as a stale connection.
                                        stale = true;
                                    }
                                    if (stale) {
                                        if (LOG.isDebugEnabled()) {
                                            LOG.debug("{} connection {} is stale", id, ConnPoolSupport.getId(conn));
                                        }
                                        poolEntry.discardConnection(CloseMode.IMMEDIATE);
                                    }
                                }
                            }
                        }
                        // Single drain point under the lease lock.
                        drainDisposals();
                        final ManagedHttpClientConnection conn = poolEntry.getConnection();
                        if (conn != null) {
                            conn.activate();
                            if (connectionConfig.getSocketTimeout() != null) {
                                conn.setSocketTimeout(connectionConfig.getSocketTimeout());
                            }
                        } else {
                            // No reusable connection survived the checks: assign a fresh,
                            // not-yet-connected one (connect() happens later).
                            poolEntry.assignConnection(connFactory.createConnection(null));
                        }
                        this.endpoint = new InternalConnectionEndpoint(poolEntry);
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("{} acquired {}", id, ConnPoolSupport.getId(endpoint));
                        }
                        return this.endpoint;
                    } catch (final Exception ex) {
                        if (LOG.isDebugEnabled()) {
                            LOG.debug("{} endpoint lease failed", id);
                        }
                        // Return the entry as non-reusable so the slot is freed.
                        pool.release(poolEntry, false);
                        throw new ExecutionException(ex.getMessage(), ex);
                    }
                } finally {
                    lock.unlock();
                }
            }
            @Override
            public boolean cancel() {
                return leaseFuture.cancel(true);
            }
        };
    }
    /**
     * Returns an endpoint to the pool. The connection is kept alive only if a keep-alive
     * period was given and the connection is still open and protocol-consistent;
     * otherwise it is closed and the pool slot freed.
     */
    @Override
    public void release(final ConnectionEndpoint endpoint, final Object state, final TimeValue keepAlive) {
        Args.notNull(endpoint, "Managed endpoint");
        // detach() atomically severs the endpoint from its pool entry; a second release
        // of the same endpoint finds null and becomes a no-op.
        final PoolEntry<HttpRoute, ManagedHttpClientConnection> entry = cast(endpoint).detach();
        if (entry == null) {
            return;
        }
        if (LOG.isDebugEnabled()) {
            LOG.debug("{} releasing endpoint", ConnPoolSupport.getId(endpoint));
        }
        if (this.isClosed()) {
            return;
        }
        final ManagedHttpClientConnection conn = entry.getConnection();
        // null keep-alive means the connection must not be reused.
        if (conn != null && keepAlive == null) {
            conn.close(CloseMode.GRACEFUL);
        }
        boolean reusable = conn != null && conn.isOpen() && conn.isConsistent();
        try {
            if (reusable) {
                entry.updateState(state);
                entry.updateExpiry(keepAlive);
                conn.passivate();
                if (LOG.isDebugEnabled()) {
                    final String s;
                    if (TimeValue.isPositive(keepAlive)) {
                        s = "for " + keepAlive;
                    } else {
                        s = "indefinitely";
                    }
                    LOG.debug("{} connection {} can be kept alive {}", ConnPoolSupport.getId(endpoint), ConnPoolSupport.getId(conn), s);
                }
            } else {
                if (LOG.isDebugEnabled()) {
                    if (conn != null && !conn.isConsistent()) {
                        LOG.debug("{} connection is in an inconsistent state and cannot be kept alive", ConnPoolSupport.getId(endpoint));
                    } else {
                        LOG.debug("{} connection is not kept alive", ConnPoolSupport.getId(endpoint));
                    }
                }
            }
        } catch (final RuntimeException ex) {
            // If state/expiry bookkeeping throws, do not re-pool the connection as live.
            reusable = false;
            throw ex;
        } finally {
            this.pool.release(entry, reusable);
            if (LOG.isDebugEnabled()) {
                LOG.debug("{} connection released {}", ConnPoolSupport.getId(endpoint), ConnPoolSupport.formatStats(entry.getRoute(), entry.getState(), pool));
            }
            drainDisposals();
        }
    }
    /**
     * Physically connects a leased endpoint to the first hop of its route (proxy if
     * present, otherwise the target), applying resolved socket/connection/TLS config.
     * No-op if the endpoint is already connected.
     */
    @Override
    public void connect(final ConnectionEndpoint endpoint, final TimeValue timeout, final HttpContext context) throws IOException {
        Args.notNull(endpoint, "Managed endpoint");
        final InternalConnectionEndpoint internalEndpoint = cast(endpoint);
        if (internalEndpoint.isConnected()) {
            return;
        }
        final PoolEntry<HttpRoute, ManagedHttpClientConnection> poolEntry = internalEndpoint.getPoolEntry();
        if (!poolEntry.hasConnection()) {
            poolEntry.assignConnection(connFactory.createConnection(null));
        }
        final HttpRoute route = poolEntry.getRoute();
        final Path unixDomainSocket = route.getUnixDomainSocket();
        final HttpHost firstHop = route.getProxyHost() != null ? route.getProxyHost() : route.getTargetHost();
        final SocketConfig socketConfig = resolveSocketConfig(route);
        final ConnectionConfig connectionConfig = resolveConnectionConfig(route);
        // An explicit timeout argument wins over the configured connect timeout.
        final Timeout connectTimeout = timeout != null ? Timeout.of(timeout.getDuration(), timeout.getTimeUnit()) : connectionConfig.getConnectTimeout();
        if (LOG.isDebugEnabled()) {
            LOG.debug("{} connecting endpoint to {} ({})", ConnPoolSupport.getId(endpoint), firstHop, connectTimeout);
        }
        final ManagedHttpClientConnection conn = poolEntry.getConnection();
        this.connectionOperator.connect(
                conn,
                firstHop,
                route.getTargetName(),
                unixDomainSocket,
                route.getLocalSocketAddress(),
                connectTimeout,
                socketConfig,
                // For tunnelled routes TLS is negotiated later by upgrade(), not here.
                route.isTunnelled() ? null : resolveTlsConfig(route.getTargetHost()),
                context);
        if (LOG.isDebugEnabled()) {
            LOG.debug("{} connected {}", ConnPoolSupport.getId(endpoint), ConnPoolSupport.getId(conn));
        }
        final Timeout socketTimeout = connectionConfig.getSocketTimeout();
        if (socketTimeout != null) {
            conn.setSocketTimeout(socketTimeout);
        }
    }
    /**
     * Upgrades an already connected endpoint to TLS against the route's target host
     * (used after a CONNECT tunnel has been established).
     */
    @Override
    public void upgrade(final ConnectionEndpoint endpoint, final HttpContext context) throws IOException {
        Args.notNull(endpoint, "Managed endpoint");
        final InternalConnectionEndpoint internalEndpoint = cast(endpoint);
        final PoolEntry<HttpRoute, ManagedHttpClientConnection> poolEntry = internalEndpoint.getValidatedPoolEntry();
        final HttpRoute route = poolEntry.getRoute();
        final HttpHost target = route.getTargetHost();
        final TlsConfig tlsConfig = resolveTlsConfig(target);
        this.connectionOperator.upgrade(
                poolEntry.getConnection(),
                target,
                route.getTargetName(),
                tlsConfig,
                context);
    }
    @Override
    public void closeIdle(final TimeValue idleTime) {
        Args.notNull(idleTime, "Idle time");
        if (LOG.isDebugEnabled()) {
            LOG.debug("Closing connections idle longer than {}", idleTime);
        }
        if (isClosed()) {
            return;
        }
        this.pool.closeIdle(idleTime);
        // Execute any closes the pool deferred via the off-lock disposal callback.
        drainDisposals();
    }
    @Override
    public void closeExpired() {
        if (isClosed()) {
            return;
        }
        LOG.debug("Closing expired connections");
        // Routed through the anonymous pool subclass, which applies closeIfExpired().
        this.pool.closeExpired();
        drainDisposals();
    }
    // The methods below simply delegate pool statistics and limits to the underlying pool.
    @Override
    public Set<HttpRoute> getRoutes() {
        return this.pool.getRoutes();
    }
    @Override
    public int getMaxTotal() {
        return this.pool.getMaxTotal();
    }
    @Override
    public void setMaxTotal(final int max) {
        this.pool.setMaxTotal(max);
    }
    @Override
    public int getDefaultMaxPerRoute() {
        return this.pool.getDefaultMaxPerRoute();
    }
    @Override
    public void setDefaultMaxPerRoute(final int max) {
        this.pool.setDefaultMaxPerRoute(max);
    }
    @Override
    public int getMaxPerRoute(final HttpRoute route) {
        return this.pool.getMaxPerRoute(route);
    }
    @Override
    public void setMaxPerRoute(final HttpRoute route, final int max) {
        this.pool.setMaxPerRoute(route, max);
    }
    @Override
    public PoolStats getTotalStats() {
        return this.pool.getTotalStats();
    }
    @Override
    public PoolStats getStats(final HttpRoute route) {
        return this.pool.getStats(route);
    }
    /**
     * Sets the same {@link SocketConfig} for all routes
     */
    public void setDefaultSocketConfig(final SocketConfig config) {
        this.socketConfigResolver = route -> config;
    }
    /**
     * Sets {@link Resolver} of {@link SocketConfig} on a per route basis.
     *
     * @since 5.2
     */
    public void setSocketConfigResolver(final Resolver<HttpRoute, SocketConfig> socketConfigResolver) {
        this.socketConfigResolver = socketConfigResolver;
    }
    /**
     * Sets the same {@link ConnectionConfig} for all routes
     *
     * @since 5.2
     */
    public void setDefaultConnectionConfig(final ConnectionConfig config) {
        this.connectionConfigResolver = route -> config;
    }
    /**
     * Sets {@link Resolver} of {@link ConnectionConfig} on a per route basis.
     *
     * @since 5.2
     */
    public void setConnectionConfigResolver(final Resolver<HttpRoute, ConnectionConfig> connectionConfigResolver) {
        this.connectionConfigResolver = connectionConfigResolver;
    }
    /**
     * Sets the same {@link TlsConfig} for all hosts
     *
     * @since 5.2
     */
    public void setDefaultTlsConfig(final TlsConfig config) {
        this.tlsConfigResolver = host -> config;
    }
    /**
     * Sets {@link Resolver} of {@link TlsConfig} on a per host basis.
     *
     * @since 5.2
     */
    public void setTlsConfigResolver(final Resolver<HttpHost, TlsConfig> tlsConfigResolver) {
        this.tlsConfigResolver = tlsConfigResolver;
    }
    /**
     * Discards an available pool entry's connection if either the entry's own expiry
     * deadline has passed or the per-route {@link ConnectionConfig} time-to-live
     * (measured from creation) has elapsed. Invoked by the pool subclasses' closeExpired().
     */
    void closeIfExpired(final PoolEntry<HttpRoute, ManagedHttpClientConnection> entry) {
        final long now = System.currentTimeMillis();
        if (entry.getExpiryDeadline().isBefore(now)) {
            entry.discardConnection(CloseMode.GRACEFUL);
        } else {
            final ConnectionConfig connectionConfig = resolveConnectionConfig(entry.getRoute());
            final TimeValue timeToLive = connectionConfig.getTimeToLive();
            if (timeToLive != null && Deadline.calculate(entry.getCreated(), timeToLive).isBefore(now)) {
                entry.discardConnection(CloseMode.GRACEFUL);
            }
        }
    }
    /**
     * @deprecated Use custom {@link #setSocketConfigResolver(Resolver)}
     */
    @Deprecated
    public SocketConfig getDefaultSocketConfig() {
        return SocketConfig.DEFAULT;
    }
    /**
     * @since 4.4
     *
     * @deprecated Use {@link #setConnectionConfigResolver(Resolver)}.
     */
    @Deprecated
    public TimeValue getValidateAfterInactivity() {
        return ConnectionConfig.DEFAULT.getValidateAfterInactivity();
    }
    /**
     * Defines period of inactivity after which persistent connections must
     * be re-validated prior to being {@link #lease(String, HttpRoute, Object) leased} to the consumer.
     * Negative values passed to this method disable connection validation. This check helps
     * detect connections that have become stale (half-closed) while kept inactive in the pool.
     *
     * @since 4.4
     *
     * @deprecated Use {@link #setConnectionConfigResolver(Resolver)}.
     */
    @Deprecated
    public void setValidateAfterInactivity(final TimeValue validateAfterInactivity) {
        setDefaultConnectionConfig(ConnectionConfig.custom()
                .setValidateAfterInactivity(validateAfterInactivity)
                .build());
    }
    // Generates unique endpoint ids of the form "ep-<n>" for logging.
    private static final PrefixedIncrementingId INCREMENTING_ID = new PrefixedIncrementingId("ep-");
    /**
     * Endpoint handed out to callers. Wraps a pool entry behind an AtomicReference so
     * that release()/detach() can atomically take ownership away from the caller.
     */
    static class InternalConnectionEndpoint extends ConnectionEndpoint implements ConnectionHolder, Identifiable {
        private final AtomicReference<PoolEntry<HttpRoute, ManagedHttpClientConnection>> poolEntryRef;
        private final String id;
        InternalConnectionEndpoint(
                final PoolEntry<HttpRoute, ManagedHttpClientConnection> poolEntry) {
            this.poolEntryRef = new AtomicReference<>(poolEntry);
            this.id = INCREMENTING_ID.getNextId();
        }
        @Override
        public String getId() {
            return id;
        }
        // Returns the entry or fails if the endpoint has been detached (released/shut down).
        PoolEntry<HttpRoute, ManagedHttpClientConnection> getPoolEntry() {
            final PoolEntry<HttpRoute, ManagedHttpClientConnection> poolEntry = poolEntryRef.get();
            if (poolEntry == null) {
                throw new ConnectionShutdownException();
            }
            return poolEntry;
        }
        // Like getPoolEntry() but additionally requires a live, open connection.
        PoolEntry<HttpRoute, ManagedHttpClientConnection> getValidatedPoolEntry() {
            final PoolEntry<HttpRoute, ManagedHttpClientConnection> poolEntry = getPoolEntry();
            final ManagedHttpClientConnection connection = poolEntry.getConnection();
            if (connection == null || !connection.isOpen()) {
                throw new ConnectionShutdownException();
            }
            return poolEntry;
        }
        // Atomically severs this endpoint from its entry; returns null if already detached.
        PoolEntry<HttpRoute, ManagedHttpClientConnection> detach() {
            return poolEntryRef.getAndSet(null);
        }
        @Override
        public void close(final CloseMode closeMode) {
            final PoolEntry<HttpRoute, ManagedHttpClientConnection> poolEntry = poolEntryRef.get();
            if (poolEntry != null) {
                poolEntry.discardConnection(closeMode);
            }
        }
        @Override
        public void close() throws IOException {
            final PoolEntry<HttpRoute, ManagedHttpClientConnection> poolEntry = poolEntryRef.get();
            if (poolEntry != null) {
                poolEntry.discardConnection(CloseMode.GRACEFUL);
            }
        }
        @Override
        public boolean isConnected() {
            final PoolEntry<HttpRoute, ManagedHttpClientConnection> poolEntry = getPoolEntry();
            final ManagedHttpClientConnection connection = poolEntry.getConnection();
            return connection != null && connection.isOpen();
        }
        @Override
        public void setSocketTimeout(final Timeout timeout) {
            getValidatedPoolEntry().getConnection().setSocketTimeout(timeout);
        }
        /**
         * @deprecated Use {@link #execute(String, ClassicHttpRequest, RequestExecutor, HttpContext)}
         */
        @Deprecated
        @Override
        public ClassicHttpResponse execute(
                final String exchangeId,
                final ClassicHttpRequest request,
                final HttpRequestExecutor requestExecutor,
                final HttpContext context) throws IOException, HttpException {
            Args.notNull(request, "HTTP request");
            Args.notNull(requestExecutor, "Request executor");
            final ManagedHttpClientConnection connection = getValidatedPoolEntry().getConnection();
            if (LOG.isDebugEnabled()) {
                LOG.debug("{} executing exchange {} over {}", id, exchangeId, ConnPoolSupport.getId(connection));
            }
            return requestExecutor.execute(request, connection, context);
        }
        /**
         * @since 5.4
         */
        @Override
        public ClassicHttpResponse execute(
                final String exchangeId,
                final ClassicHttpRequest request,
                final RequestExecutor requestExecutor,
                final HttpContext context) throws IOException, HttpException {
            Args.notNull(request, "HTTP request");
            Args.notNull(requestExecutor, "Request executor");
            final ManagedHttpClientConnection connection = getValidatedPoolEntry().getConnection();
            if (LOG.isDebugEnabled()) {
                LOG.debug("{} executing exchange {} over {}", id, exchangeId, ConnPoolSupport.getId(connection));
            }
            return requestExecutor.execute(request, connection, context);
        }
        /**
         * @since 5.4
         */
        @Override
        public EndpointInfo getInfo() {
            final PoolEntry<HttpRoute, ManagedHttpClientConnection> poolEntry = poolEntryRef.get();
            if (poolEntry != null) {
                final ManagedHttpClientConnection connection = poolEntry.getConnection();
                if (connection != null && connection.isOpen()) {
                    return new EndpointInfo(connection.getProtocolVersion(), connection.getSSLSession());
                }
            }
            return null;
        }
        @Override
        public HttpConnection get() {
            final PoolEntry<HttpRoute, ManagedHttpClientConnection> poolEntry = poolEntryRef.get();
            return poolEntry != null ? poolEntry.getConnection() : null;
        }
    }
    /**
     * Returns whether this connection manager has been shut down.
     *
     * @since 5.4
     */
    public boolean isClosed() {
        return this.closed.get();
    }
    // Runs any connection disposals deferred by the off-lock callback; no-op when
    // off-lock disposal is disabled (offLockDisposer == null).
    private void drainDisposals() {
        if (offLockDisposer != null) {
            offLockDisposer.drain();
        }
    }
}
|
google/closure-stylesheets | 37,151 | tests/com/google/common/css/compiler/ast/DefaultVisitControllerTest.java | /*
* Copyright 2009 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.common.css.compiler.ast;
import static com.google.common.truth.Truth.assertThat;
import static com.google.common.truth.Truth.assertWithMessage;
import static org.mockito.Matchers.any;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.verify;
import static org.mockito.Mockito.when;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.Lists;
import com.google.common.css.SourceCode;
import com.google.common.css.compiler.ast.CssAttributeSelectorNode.MatchType;
import com.google.common.css.compiler.ast.CssCompositeValueNode.Operator;
import com.google.common.css.compiler.ast.CssFunctionNode.Function;
import com.google.common.css.compiler.ast.CssStringNode.Type;
import com.google.common.css.compiler.ast.DefaultVisitController.RootVisitAfterChildrenState;
import com.google.common.css.compiler.ast.DefaultVisitController.RootVisitBeforeChildrenState;
import com.google.common.css.compiler.ast.DefaultVisitController.RootVisitBodyState;
import com.google.common.css.compiler.ast.DefaultVisitController.RootVisitCharsetState;
import com.google.common.css.compiler.ast.DefaultVisitController.RootVisitImportBlockState;
import com.google.common.css.compiler.ast.DefaultVisitController.VisitBlockChildrenState;
import com.google.common.css.compiler.ast.DefaultVisitController.VisitDefinitionState;
import com.google.common.css.compiler.ast.DefaultVisitController.VisitImportBlockChildrenState;
import com.google.common.css.compiler.ast.DefaultVisitController.VisitImportRuleState;
import com.google.common.css.compiler.ast.DefaultVisitController.VisitReplaceChildrenState;
import java.util.List;
import org.junit.Before;
import org.junit.Test;
import org.junit.runner.RunWith;
import org.junit.runners.JUnit4;
import org.mockito.ArgumentCaptor;
import org.mockito.ArgumentMatchers;
import org.mockito.InOrder;
import org.mockito.Matchers;
import org.mockito.Mockito;
/**
* Unit tests for {@link DefaultVisitController}.
*
* <p>TODO(oana): Add more unit tests.
*
* @author oana@google.com (Oana Florescu)
*/
@RunWith(JUnit4.class)
public class DefaultVisitControllerTest {
DefaultTreeVisitor testVisitor = mock(DefaultTreeVisitor.class);
@Before
public void setUp() throws Exception {
when(testVisitor.enterTree(Matchers.<CssRootNode>any())).thenReturn(true);
when(testVisitor.enterImportBlock(Matchers.<CssImportBlockNode>any())).thenReturn(true);
when(testVisitor.enterBlock(Matchers.<CssBlockNode>any())).thenReturn(true);
when(testVisitor.enterDefinition(Matchers.<CssDefinitionNode>any())).thenReturn(true);
when(testVisitor.enterRuleset(Matchers.<CssRulesetNode>any())).thenReturn(true);
when(testVisitor.enterSelectorBlock(Matchers.<CssSelectorListNode>any())).thenReturn(true);
when(testVisitor.enterSelector(Matchers.<CssSelectorNode>any())).thenReturn(true);
when(testVisitor.enterClassSelector(Matchers.<CssClassSelectorNode>any())).thenReturn(true);
when(testVisitor.enterIdSelector(Matchers.<CssIdSelectorNode>any())).thenReturn(true);
when(testVisitor.enterPseudoClass(Matchers.<CssPseudoClassNode>any())).thenReturn(true);
when(testVisitor.enterPseudoElement(Matchers.<CssPseudoElementNode>any())).thenReturn(true);
when(testVisitor.enterAttributeSelector(Matchers.<CssAttributeSelectorNode>any()))
.thenReturn(true);
when(testVisitor.enterDeclarationBlock(Matchers.<CssDeclarationBlockNode>any()))
.thenReturn(true);
when(testVisitor.enterDeclaration(Matchers.<CssDeclarationNode>any())).thenReturn(true);
when(testVisitor.enterPropertyValue(Matchers.<CssPropertyValueNode>any())).thenReturn(true);
when(testVisitor.enterValueNode(Matchers.<CssValueNode>any())).thenReturn(true);
when(testVisitor.enterUnknownAtRule(Matchers.<CssUnknownAtRuleNode>any())).thenReturn(true);
when(testVisitor.enterMediaTypeListDelimiter(
Matchers.<CssNodesListNode<? extends CssNode>>any()))
.thenReturn(true);
when(testVisitor.enterForLoop(Matchers.<CssForLoopRuleNode>any())).thenReturn(true);
when(testVisitor.enterComponent(Matchers.<CssComponentNode>any())).thenReturn(true);
}
@Test
public void testConstructor() {
DefaultVisitController visitController = new DefaultVisitController(
new CssTree((SourceCode) null), false);
assertThat(visitController.getStateStack().isEmpty()).isTrue();
}
@Test
public void testVisitBlock() {
CssLiteralNode literal = new CssLiteralNode("");
CssDefinitionNode def = new CssDefinitionNode(literal);
CssBlockNode block = new CssBlockNode(false);
block.addChildToBack(def);
CssRootNode root = new CssRootNode(block);
CssTree tree = new CssTree(null, root);
DefaultVisitController controller = new DefaultVisitController(tree, true);
controller.startVisit(testVisitor);
InOrder inOrder = Mockito.inOrder(testVisitor);
// Enter Tree gets the root - there is no enterRoot.
inOrder.verify(testVisitor).enterTree(root);
// There are blocks that get created even if you don't add them.
inOrder.verify(testVisitor).enterImportBlock(Matchers.<CssImportBlockNode>any());
// Then we enter the block.
inOrder.verify(testVisitor).enterBlock(block);
// Then another node that we created.
inOrder.verify(testVisitor).enterDefinition(def);
}
@Test
public void testVisitProperties() {
  // Builds one ruleset whose single selector carries a refiner of every
  // supported kind, plus a declaration "prop: one two three", then verifies
  // the controller visits every node in document order.
  CssValueNode first = new CssLiteralNode("one", null);
  CssValueNode second = new CssLiteralNode("two", null);
  CssValueNode third = new CssLiteralNode("three", null);
  CssPropertyNode propName = new CssPropertyNode("prop");
  CssPropertyValueNode propValue = new CssPropertyValueNode();
  propValue.addChildToBack(first);
  propValue.addChildToBack(second);
  propValue.addChildToBack(third);
  CssDeclarationNode decl = new CssDeclarationNode(propName, propValue);
  CssDeclarationBlockNode dBlock = new CssDeclarationBlockNode();
  dBlock.addChildToBack(decl);
  // One refiner of each kind, attached to the selector in this order;
  // the ordered verification below depends on this insertion order.
  CssClassSelectorNode classSelector = new CssClassSelectorNode("foo", null);
  CssIdSelectorNode idSelector = new CssIdSelectorNode("bar", null);
  CssPseudoClassNode pseudoClass = new CssPseudoClassNode("foo", null);
  CssPseudoElementNode pseudoElement = new CssPseudoElementNode("bar", null);
  CssAttributeSelectorNode attrSelector = new CssAttributeSelectorNode(
      MatchType.EXACT, "hreflang",
      new CssStringNode(
          CssStringNode.Type.DOUBLE_QUOTED_STRING, "en"), null);
  CssSelectorNode selector = new CssSelectorNode("name", null);
  selector.getRefiners().addChildToBack(classSelector);
  selector.getRefiners().addChildToBack(idSelector);
  selector.getRefiners().addChildToBack(pseudoClass);
  selector.getRefiners().addChildToBack(pseudoElement);
  selector.getRefiners().addChildToBack(attrSelector);
  CssRulesetNode ruleset = new CssRulesetNode(dBlock);
  ruleset.addSelector(selector);
  CssBlockNode block = new CssBlockNode(false);
  block.addChildToBack(ruleset);
  CssRootNode root = new CssRootNode(block);
  CssTree tree = new CssTree(null, root);
  DefaultVisitController controller = new DefaultVisitController(tree, true);
  controller.startVisit(testVisitor);
  InOrder inOrder = Mockito.inOrder(testVisitor);
  // Document order: tree, synthesized import block, body block, ruleset,
  // selector block, selector, each refiner in insertion order, then the
  // declaration block, declaration, property value and its three values.
  inOrder.verify(testVisitor).enterTree(root);
  inOrder.verify(testVisitor).enterImportBlock(Matchers.<CssImportBlockNode>any());
  inOrder.verify(testVisitor).enterBlock(block);
  inOrder.verify(testVisitor).enterRuleset(ruleset);
  inOrder.verify(testVisitor).enterSelectorBlock(Matchers.<CssSelectorListNode>any());
  inOrder.verify(testVisitor).enterSelector(selector);
  inOrder.verify(testVisitor).enterClassSelector(classSelector);
  inOrder.verify(testVisitor).enterIdSelector(idSelector);
  inOrder.verify(testVisitor).enterPseudoClass(pseudoClass);
  inOrder.verify(testVisitor).enterPseudoElement(pseudoElement);
  inOrder.verify(testVisitor).enterAttributeSelector(attrSelector);
  inOrder.verify(testVisitor).enterDeclarationBlock(dBlock);
  inOrder.verify(testVisitor).enterDeclaration(decl);
  inOrder.verify(testVisitor).enterPropertyValue(propValue);
  inOrder.verify(testVisitor).enterValueNode(first);
  inOrder.verify(testVisitor).enterValueNode(second);
  inOrder.verify(testVisitor).enterValueNode(third);
}
@Test
public void testStateStack() {
  CssTree tree = new CssTree((SourceCode) null);
  DefaultVisitController controller = new DefaultVisitController(tree, false);
  RootVisitBeforeChildrenState initialState =
      controller.new RootVisitBeforeChildrenState(tree.getRoot());

  // Pushing exposes the state as the top of a one-element stack.
  controller.getStateStack().push(initialState);
  assertThat(controller.getStateStack().getTop()).isEqualTo(initialState);
  assertThat(controller.getStateStack().size()).isEqualTo(1);

  // Popping the only element empties the stack again.
  controller.getStateStack().pop();
  assertThat(controller.getStateStack().isEmpty()).isTrue();
}
@Test
public void testRootVisitBeforeChildrenState() {
  // The "before children" root state must hand off to the charset state
  // when asked to advance, without growing the stack.
  CssTree tree = new CssTree((SourceCode) null);
  DefaultVisitController visitController = new DefaultVisitController(
      tree, false);
  RootVisitBeforeChildrenState state
      = visitController.new RootVisitBeforeChildrenState(tree.getRoot());
  visitController.getStateStack().push(state);
  assertThat(visitController.getStateStack().getTop()).isEqualTo(state);
  state.transitionToNextState();
  // The transition replaces the top of the stack in place (size stays 1).
  assertThat(visitController.getStateStack().size()).isEqualTo(1);
  assertThat(visitController.getStateStack().getTop()).isInstanceOf(RootVisitCharsetState.class);
}
@Test
public void testRootVisitCharsetState() {
  // While in the charset state, removeCurrentNode() must delete the tree's
  // @charset rule; advancing then moves on to the import-block state.
  CssTree tree = new CssTree((SourceCode) null);
  DefaultVisitController visitController = new DefaultVisitController(
      tree, true);
  RootVisitCharsetState state =
      visitController.new RootVisitCharsetState(tree.getRoot(), tree.getRoot().getCharsetRule());
  visitController.getStateStack().push(state);
  assertThat(visitController.getStateStack().getTop()).isEqualTo(state);
  visitController.removeCurrentNode();
  // The charset rule is gone from the tree, not just from the traversal.
  assertThat(tree.getRoot().getCharsetRule()).isNull();
  state.transitionToNextState();
  // The transition replaces the stack top in place (size stays 1).
  assertThat(visitController.getStateStack().size()).isEqualTo(1);
  assertThat(visitController.getStateStack().getTop())
      .isInstanceOf(RootVisitImportBlockState.class);
}
@Test
public void testRootVisitImportBlockState() {
  // The first transition pushes a child state for the import block's
  // children; the second replaces this state with the body state.
  CssTree tree = new CssTree((SourceCode) null);
  DefaultVisitController visitController = new DefaultVisitController(
      tree, true);
  RootVisitImportBlockState state =
      visitController
          .new RootVisitImportBlockState(tree.getRoot(), tree.getRoot().getImportRules());
  visitController.getStateStack().push(state);
  assertThat(visitController.getStateStack().getTop()).isEqualTo(state);
  state.transitionToNextState();
  // A children state was pushed on top of this one.
  assertThat(visitController.getStateStack().size()).isEqualTo(2);
  assertThat(visitController.getStateStack().getTop())
      .isInstanceOf(VisitImportBlockChildrenState.class);
  state.transitionToNextState();
  // Replaced rather than pushed/popped, so the size is unchanged.
  assertThat(visitController.getStateStack().size()).isEqualTo(2);
  assertThat(visitController.getStateStack().getTop()).isInstanceOf(RootVisitBodyState.class);
}
@Test
public void testVisitImportBlockChildrenState() {
  // An import block with a single @import rule: advancing pushes one
  // VisitImportRuleState per child, returns here when the child is done,
  // and finally pops itself once no children remain.
  CssTree tree = new CssTree((SourceCode) null);
  DefaultVisitController visitController = new DefaultVisitController(
      tree, true);
  visitController.visitor = new DefaultTreeVisitor();
  CssImportBlockNode cssImportBlockNode = new CssImportBlockNode();
  cssImportBlockNode.setChildren(Lists.newArrayList(new CssImportRuleNode()));
  VisitImportBlockChildrenState state
      = visitController.new VisitImportBlockChildrenState(cssImportBlockNode);
  visitController.getStateStack().push(state);
  assertThat(visitController.getStateStack().getTop()).isEqualTo(state);
  state.transitionToNextState();
  // The rule state for the single child is now on top.
  assertThat(visitController.getStateStack().size()).isEqualTo(2);
  assertThat(visitController.getStateStack().getTop()).isInstanceOf(VisitImportRuleState.class);
  visitController.getStateStack().getTop().transitionToNextState();
  // The rule state pops itself, returning control to the children state.
  assertThat(visitController.getStateStack().size()).isEqualTo(1);
  assertThat(visitController.getStateStack().getTop())
      .isInstanceOf(VisitImportBlockChildrenState.class);
  state.transitionToNextState();
  // No children remain, so the children state pops as well.
  assertThat(visitController.getStateStack().isEmpty()).isTrue();
}
@Test
public void testVisitImportRuleState() {
  // Visiting a single import rule must pop its state once the rule is done.
  DefaultVisitController controller =
      new DefaultVisitController(new CssTree((SourceCode) null), true);
  controller.visitor = new DefaultTreeVisitor();

  VisitImportRuleState ruleState =
      controller.new VisitImportRuleState(new CssImportRuleNode());
  controller.getStateStack().push(ruleState);
  assertThat(controller.getStateStack().getTop()).isEqualTo(ruleState);

  ruleState.transitionToNextState();
  assertThat(controller.getStateStack().isEmpty()).isTrue();
}
@Test
public void testRootVisitBodyState() {
  // Visiting the root body pushes a block-children state; once the (empty)
  // children are done, the body state transitions to the after-children
  // state in place.
  CssTree tree = new CssTree((SourceCode) null);
  DefaultVisitController visitController = new DefaultVisitController(
      tree, true);
  visitController.visitor = new DefaultTreeVisitor();
  RootVisitBodyState state =
      visitController.new RootVisitBodyState(tree.getRoot(), tree.getRoot().getBody());
  visitController.getStateStack().push(state);
  assertThat(visitController.getStateStack().getTop()).isEqualTo(state);
  state.transitionToNextState();
  assertThat(visitController.getStateStack().size()).isEqualTo(2);
  assertThat(visitController.getStateStack().getTop())
      .isInstanceOf(VisitBlockChildrenState.class);
  visitController.getStateStack().getTop().transitionToNextState();
  // The empty body has no children, so the child state pops immediately.
  assertThat(visitController.getStateStack().size()).isEqualTo(1);
  assertThat(visitController.getStateStack().getTop()).isInstanceOf(RootVisitBodyState.class);
  state.transitionToNextState();
  // Replaced in place: size stays 1 while the state type changes.
  assertThat(visitController.getStateStack().size()).isEqualTo(1);
  assertThat(visitController.getStateStack().getTop())
      .isInstanceOf(RootVisitAfterChildrenState.class);
}
@Test
public void testVisitBlockChildrenState1() {
  // A body with one definition: currentIndex starts at -1 and advances to 0
  // when the child's state is pushed; repeated transitions then drain the
  // stack completely.
  CssDefinitionNode def = new CssDefinitionNode(new CssLiteralNode(""));
  CssBlockNode block = new CssBlockNode(false);
  block.addChildToBack(def);
  CssRootNode root = new CssRootNode(block);
  CssTree tree = new CssTree(null, root);
  DefaultVisitController visitController = new DefaultVisitController(
      tree, true);
  visitController.visitor = new DefaultTreeVisitor();
  VisitReplaceChildrenState<CssNode> state
      = visitController.new VisitBlockChildrenState(tree.getRoot().getBody());
  visitController.getStateStack().push(state);
  assertThat(visitController.getStateStack().getTop()).isEqualTo(state);
  assertThat(state.currentIndex).isEqualTo(-1);
  state.transitionToNextState();
  assertThat(visitController.getStateStack().size()).isEqualTo(2);
  assertThat(visitController.getStateStack().getTop()).isInstanceOf(VisitDefinitionState.class);
  assertThat(state.currentIndex).isEqualTo(0);
  visitController.getStateStack().getTop().transitionToNextState();
  assertThat(visitController.getStateStack().size()).isEqualTo(3);
  // Drain the remaining states; the stack must end up empty.
  state.transitionToNextState();
  state.transitionToNextState();
  state.transitionToNextState();
  assertThat(visitController.getStateStack().isEmpty()).isTrue();
}
@Test
public void testVisitBlockChildrenState2() {
  // Two definitions in the body. removeCurrentChild() deletes the child at
  // currentIndex without advancing it, so the index stays at 0 through both
  // removals and through the final drain of the stack.
  CssDefinitionNode def = new CssDefinitionNode(new CssLiteralNode(""));
  CssBlockNode block = new CssBlockNode(false);
  block.addChildToBack(def);
  def = new CssDefinitionNode(new CssLiteralNode(""));
  block.addChildToBack(def);
  CssRootNode root = new CssRootNode(block);
  CssTree tree = new CssTree(null, root);
  DefaultVisitController visitController = new DefaultVisitController(
      tree, true);
  visitController.visitor = new DefaultTreeVisitor();
  VisitReplaceChildrenState<CssNode> state
      = visitController.new VisitBlockChildrenState(tree.getRoot().getBody());
  visitController.getStateStack().push(state);
  assertThat(visitController.getStateStack().getTop()).isEqualTo(state);
  assertThat(state.currentIndex).isEqualTo(-1);
  state.transitionToNextState();
  assertThat(visitController.getStateStack().size()).isEqualTo(2);
  assertThat(visitController.getStateStack().getTop()).isInstanceOf(VisitDefinitionState.class);
  assertThat(state.currentIndex).isEqualTo(0);
  state.removeCurrentChild();
  assertThat(state.currentIndex).isEqualTo(0);
  state.removeCurrentChild();
  assertThat(state.currentIndex).isEqualTo(0);
  visitController.getStateStack().getTop().transitionToNextState();
  assertThat(visitController.getStateStack().size()).isEqualTo(3);
  state.transitionToNextState();
  state.transitionToNextState();
  state.transitionToNextState();
  assertThat(visitController.getStateStack().isEmpty()).isTrue();
  assertThat(state.currentIndex).isEqualTo(0);
}
@Test
public void testVisitBlockChildrenState3() {
  // replaceCurrentBlockChildWith(..., true) swaps in two new definitions and
  // marks the replacements for visiting; the current index must not move,
  // and a subsequent removeCurrentChild() must also leave it at 0.
  CssDefinitionNode def = new CssDefinitionNode(new CssLiteralNode(""));
  CssBlockNode block = new CssBlockNode(false);
  block.addChildToBack(def);
  def = new CssDefinitionNode(new CssLiteralNode(""));
  block.addChildToBack(def);
  CssRootNode root = new CssRootNode(block);
  CssTree tree = new CssTree(null, root);
  DefaultVisitController visitController = new DefaultVisitController(
      tree, true);
  visitController.visitor = new DefaultTreeVisitor();
  VisitReplaceChildrenState<CssNode> state
      = visitController.new VisitBlockChildrenState(tree.getRoot().getBody());
  visitController.getStateStack().push(state);
  assertThat(visitController.getStateStack().getTop()).isEqualTo(state);
  assertThat(state.currentIndex).isEqualTo(-1);
  state.transitionToNextState();
  assertThat(visitController.getStateStack().size()).isEqualTo(2);
  assertThat(visitController.getStateStack().getTop()).isInstanceOf(VisitDefinitionState.class);
  assertThat(state.currentIndex).isEqualTo(0);
  state.replaceCurrentBlockChildWith(
      Lists.<CssNode>newArrayList(
          new CssDefinitionNode(new CssLiteralNode("")),
          new CssDefinitionNode(new CssLiteralNode(""))),
      true);
  assertThat(state.currentIndex).isEqualTo(0);
  state.removeCurrentChild();
  assertThat(state.currentIndex).isEqualTo(0);
  visitController.getStateStack().getTop().transitionToNextState();
  assertThat(visitController.getStateStack().size()).isEqualTo(3);
}
/**
 * Verifies traversal of an unknown at-rule ({@code @def x y}) with two
 * parameters and no block: the rule node is entered after its enclosing
 * block, then each parameter in order, with a media-type-list delimiter
 * between consecutive parameters.
 *
 * <p>Fix: the original mixed unordered {@code verify(...)} with
 * {@code inOrder.verify(...)} even though its comments describe a strict
 * sequence; all verifications now go through {@code inOrder} so an
 * out-of-sequence visit fails the test.
 */
@Test
public void testVisitSimpleUnknownAtRule() {
  CssLiteralNode defLit = new CssLiteralNode("def");
  CssUnknownAtRuleNode atDef = new CssUnknownAtRuleNode(defLit, false);
  CssLiteralNode xLit = new CssLiteralNode("x");
  CssLiteralNode yLit = new CssLiteralNode("y");
  List<CssValueNode> defParameters = Lists.newArrayList((CssValueNode) xLit, (CssValueNode) yLit);
  atDef.setParameters(defParameters);
  CssBlockNode block = new CssBlockNode(false);
  block.addChildToBack(atDef);
  CssRootNode root = new CssRootNode(block);
  CssTree tree = new CssTree(null, root);
  DefaultVisitController controller = new DefaultVisitController(tree, true);
  controller.startVisit(testVisitor);
  InOrder inOrder = Mockito.inOrder(testVisitor);
  // Enter Tree gets the root - there is no enterRoot.
  inOrder.verify(testVisitor).enterTree(root);
  // There are blocks that get created even if you don't add them.
  inOrder.verify(testVisitor).enterImportBlock(Matchers.<CssImportBlockNode>any());
  // Then we enter the block.
  inOrder.verify(testVisitor).enterBlock(block);
  // Then the unknown at-rule node that we created.
  inOrder.verify(testVisitor).enterUnknownAtRule(atDef);
  // Then the media type list params, delimiter between consecutive ones.
  for (int i = 0; i < defParameters.size(); ++i) {
    inOrder.verify(testVisitor).enterValueNode(defParameters.get(i));
    if (i < defParameters.size() - 1) {
      inOrder.verify(testVisitor).enterMediaTypeListDelimiter(atDef);
    }
  }
  // We've got no block associated with this at rule.
}
/**
 * Verifies traversal of nested unknown at-rules: an {@code @if} rule whose
 * block contains a {@code @def} rule. Expected order: tree, import block,
 * body block, {@code @if} node, its parameters, its block, the inner
 * {@code @def} node, then the inner parameters.
 *
 * <p>Fix: the original mixed unordered {@code verify(...)} with
 * {@code inOrder.verify(...)}; all verifications now use {@code inOrder}
 * so the documented sequence is actually enforced.
 */
@Test
public void testVisitComplexUnknownAtRule() {
  CssLiteralNode defLit = new CssLiteralNode("def");
  CssUnknownAtRuleNode atDef = new CssUnknownAtRuleNode(defLit, false);
  CssLiteralNode xLit = new CssLiteralNode("x");
  CssLiteralNode yLit = new CssLiteralNode("y");
  List<CssValueNode> defParameters = Lists.newArrayList((CssValueNode) xLit, (CssValueNode) yLit);
  atDef.setParameters(defParameters);
  CssBlockNode defBlock = new CssBlockNode(true);
  defBlock.addChildToBack(atDef);
  CssLiteralNode ifLit = new CssLiteralNode("if");
  CssUnknownAtRuleNode atIf = new CssUnknownAtRuleNode(ifLit, true);
  CssBooleanExpressionNode ifCondition = new CssBooleanExpressionNode(
      CssBooleanExpressionNode.Type.CONSTANT, "some condition", null, null);
  List<CssValueNode> ifParameters = Lists.newArrayList((CssValueNode) ifCondition);
  atIf.setParameters(ifParameters);
  atIf.setBlock(defBlock);
  CssBlockNode block = new CssBlockNode(false);
  block.addChildToBack(atIf);
  CssRootNode root = new CssRootNode(block);
  CssTree tree = new CssTree(null, root);
  DefaultVisitController controller = new DefaultVisitController(tree, true);
  controller.startVisit(testVisitor);
  InOrder inOrder = Mockito.inOrder(testVisitor);
  // Enter Tree gets the root - there is no enterRoot.
  inOrder.verify(testVisitor).enterTree(root);
  // There are blocks that get created even if you don't add them.
  inOrder.verify(testVisitor).enterImportBlock(Matchers.<CssImportBlockNode>any());
  // Then we enter the block.
  inOrder.verify(testVisitor).enterBlock(block);
  // Then the unknown 'if' at-rule node that we created.
  inOrder.verify(testVisitor).enterUnknownAtRule(atIf);
  // Then the media type list params for 'if'.
  for (int i = 0; i < ifParameters.size(); ++i) {
    inOrder.verify(testVisitor).enterValueNode(ifParameters.get(i));
    if (i < ifParameters.size() - 1) {
      inOrder.verify(testVisitor).enterMediaTypeListDelimiter(atIf);
    }
  }
  // Then we enter the defBlock.
  inOrder.verify(testVisitor).enterBlock(defBlock);
  // Then the unknown 'def' at-rule node within the 'if'.
  inOrder.verify(testVisitor).enterUnknownAtRule(atDef);
  // Then the media type list params for 'def'.
  for (int i = 0; i < defParameters.size(); ++i) {
    inOrder.verify(testVisitor).enterValueNode(defParameters.get(i));
    if (i < defParameters.size() - 1) {
      inOrder.verify(testVisitor).enterMediaTypeListDelimiter(atDef);
    }
  }
}
@Test
public void testVisitComponent() {
  // A component CSS_BAR wrapping a single @def FOO definition.
  CssDefinitionNode definition = new CssDefinitionNode(new CssLiteralNode("FOO"));
  CssBlockNode componentBlock = new CssBlockNode(true);
  componentBlock.addChildToBack(definition);
  CssComponentNode component =
      new CssComponentNode(
          new CssLiteralNode("CSS_BAR"),
          null,
          false,
          CssComponentNode.PrefixStyle.LITERAL,
          componentBlock);
  CssBlockNode body = new CssBlockNode(false);
  body.addChildToBack(component);
  CssRootNode root = new CssRootNode(body);

  DefaultVisitController controller =
      new DefaultVisitController(new CssTree(null, root), true);
  controller.startVisit(testVisitor);

  InOrder inOrder = Mockito.inOrder(testVisitor);
  // enterTree receives the root; there is no separate enterRoot callback.
  inOrder.verify(testVisitor).enterTree(root);
  // An import block is synthesized even when none was added explicitly.
  inOrder.verify(testVisitor).enterImportBlock(Matchers.<CssImportBlockNode>any());
  // Body block, the component, then the definition nested inside it.
  inOrder.verify(testVisitor).enterBlock(body);
  inOrder.verify(testVisitor).enterComponent(component);
  inOrder.verify(testVisitor).enterDefinition(definition);
}
@Test
public void testVisitFunctionNode() {
  // A definition containing url('some_url'): since enterFunctionNode
  // returns true, the controller must also visit the function's argument,
  // with matching enter/leave callbacks in strict order.
  CssDefinitionNode def = new CssDefinitionNode(new CssLiteralNode("FOO"));
  CssFunctionNode func = new CssFunctionNode(Function.byName("url"), null);
  CssStringNode argument = new CssStringNode(Type.SINGLE_QUOTED_STRING, "some_url");
  func.setArguments(new CssFunctionArgumentsNode(ImmutableList.<CssValueNode>of(argument)));
  def.addChildToBack(func);
  // Visit children
  when(testVisitor.enterFunctionNode(any(CssFunctionNode.class))).thenReturn(true);
  DefaultVisitController controller = new DefaultVisitController(def, true);
  controller.startVisit(testVisitor);
  ArgumentCaptor<CssValueNode> argCaptor = ArgumentCaptor.forClass(CssValueNode.class);
  InOrder inOrder = Mockito.inOrder(testVisitor);
  inOrder.verify(testVisitor).enterDefinition(def);
  inOrder.verify(testVisitor).enterFunctionNode(func);
  inOrder.verify(testVisitor).enterArgumentNode(argCaptor.capture());
  inOrder.verify(testVisitor).leaveArgumentNode(argCaptor.capture());
  inOrder.verify(testVisitor).leaveFunctionNode(func);
  inOrder.verify(testVisitor).leaveDefinition(def);
  inOrder.verifyNoMoreInteractions();
  // The visited argument must correspond to the one attached to the
  // function (compared via string form).
  assertThat(argCaptor.getValue().toString()).isEqualTo(argument.toString());
}
@Test
public void testVisitFunctionNode_dontVisitChildren() {
  // Same tree as testVisitFunctionNode, but enterFunctionNode returns
  // false: the argument node must be skipped entirely, leaving only the
  // definition and function enter/leave pairs.
  CssDefinitionNode def = new CssDefinitionNode(new CssLiteralNode("FOO"));
  CssFunctionNode func = new CssFunctionNode(Function.byName("url"), null);
  CssStringNode argument = new CssStringNode(Type.SINGLE_QUOTED_STRING, "some_url");
  func.setArguments(new CssFunctionArgumentsNode(ImmutableList.<CssValueNode>of(argument)));
  def.addChildToBack(func);
  // Prevent visiting children
  when(testVisitor.enterFunctionNode(any(CssFunctionNode.class))).thenReturn(false);
  DefaultVisitController controller = new DefaultVisitController(def, true);
  controller.startVisit(testVisitor);
  InOrder inOrder = Mockito.inOrder(testVisitor);
  inOrder.verify(testVisitor).enterDefinition(def);
  inOrder.verify(testVisitor).enterFunctionNode(func);
  inOrder.verify(testVisitor).leaveFunctionNode(func);
  inOrder.verify(testVisitor).leaveDefinition(def);
  inOrder.verifyNoMoreInteractions();
}
/**
 * Verifies that every simple value inside a composite value (a,b,c) is
 * visited exactly once via {@code enterValueNode}, and the composite itself
 * exactly once via {@code enterCompositeValueNode}.
 *
 * <p>Fix: the local visitor was named {@code testVisitor}, shadowing the
 * mocked {@code testVisitor} field used by every other test in this class;
 * renamed to avoid the shadowing.
 */
@Test
public void testVisitValueNodes() {
  List<CssValueNode> simpleValues = Lists.newLinkedList();
  for (String v : new String[] {"a", "b", "c"}) {
    simpleValues.add(new CssLiteralNode(v, null));
  }
  CssCompositeValueNode parent =
      new CssCompositeValueNode(
          simpleValues, CssCompositeValueNode.Operator.COMMA, null);
  CssPropertyValueNode propValue = new CssPropertyValueNode();
  propValue.addChildToBack(parent);
  CssDeclarationNode decl =
      new CssDeclarationNode(
          new CssPropertyNode("prop"),
          propValue);
  CssDeclarationBlockNode db = new CssDeclarationBlockNode();
  db.addChildToBack(decl);
  CssRulesetNode ruleset = new CssRulesetNode(db);
  ruleset.addSelector(new CssSelectorNode("name", null));
  CssBlockNode b = new CssBlockNode(false);
  b.addChildToBack(ruleset);
  CssTree t = new CssTree(null, new CssRootNode(b));
  final List<CssValueNode> cNodes = Lists.newLinkedList();
  final List<CssValueNode> evnNodes = Lists.newLinkedList();
  DefaultTreeVisitor collectingVisitor =
      new DefaultTreeVisitor() {
        @Override
        public boolean enterCompositeValueNode(CssCompositeValueNode c) {
          cNodes.add(c);
          return true;
        }

        @Override
        public boolean enterValueNode(CssValueNode n) {
          evnNodes.add(n);
          return true;
        }
      };
  DefaultVisitController controller = new DefaultVisitController(t, true);
  controller.startVisit(collectingVisitor);
  // Each simple value visited once; the composite visited once.
  assertThat(evnNodes).hasSize(simpleValues.size());
  for (CssValueNode i : simpleValues) {
    assertThat(evnNodes).contains(i);
  }
  assertThat(cNodes).hasSize(1);
  assertThat(cNodes).contains(parent);
}
/**
 * Verifies that a composite value whose members are function nodes triggers
 * {@code enterFunctionNode} for each member and
 * {@code enterCompositeValueNode} once for the composite.
 *
 * <p>Fix: the local visitor was named {@code testVisitor}, shadowing the
 * mocked {@code testVisitor} field used by every other test in this class;
 * renamed to avoid the shadowing.
 */
@Test
public void testVisitCompositeValueNodeWithFunction() {
  List<CssValueNode> simpleValues = Lists.newLinkedList();
  simpleValues.add(
      new CssFunctionNode(CssFunctionNode.Function.byName("url"), null));
  simpleValues.add(
      new CssFunctionNode(CssFunctionNode.Function.byName("url"), null));
  CssCompositeValueNode parent =
      new CssCompositeValueNode(
          simpleValues, CssCompositeValueNode.Operator.COMMA, null);
  CssPropertyValueNode propValue = new CssPropertyValueNode();
  propValue.addChildToBack(parent);
  CssDeclarationNode decl =
      new CssDeclarationNode(
          new CssPropertyNode("prop"),
          propValue);
  CssDeclarationBlockNode db = new CssDeclarationBlockNode();
  db.addChildToBack(decl);
  CssRulesetNode ruleset = new CssRulesetNode(db);
  ruleset.addSelector(new CssSelectorNode("name", null));
  CssBlockNode b = new CssBlockNode(false);
  b.addChildToBack(ruleset);
  CssTree t = new CssTree(null, new CssRootNode(b));
  final List<CssValueNode> compositeNode = Lists.newLinkedList();
  final List<CssValueNode> functionNodes = Lists.newLinkedList();
  DefaultTreeVisitor collectingVisitor =
      new DefaultTreeVisitor() {
        @Override
        public boolean enterCompositeValueNode(CssCompositeValueNode c) {
          compositeNode.add(c);
          return true;
        }

        @Override
        public boolean enterFunctionNode(CssFunctionNode n) {
          functionNodes.add(n);
          return true;
        }
      };
  DefaultVisitController controller = new DefaultVisitController(t, true);
  controller.startVisit(collectingVisitor);
  assertThat(functionNodes).hasSize(2);
  assertThat(compositeNode).hasSize(1);
  assertThat(compositeNode).contains(parent);
}
/**
 * Parses {@code p {{ background: <backgroundValue>; }}}, removes every
 * function node through the mutating visit controller, and asserts that the
 * functions are gone while the sibling literals "red" and "fixed" survive.
 *
 * <p>Fix: removed a leftover {@code System.err.println} debug statement from
 * the removal visitor.
 *
 * @param backgroundValue a background property value containing at least one
 *     function plus the literals "red" and "fixed"
 */
public void verifyRemoveablePropertyValueElement(String backgroundValue) {
  try {
    CssTree t = new GssParser(
        new com.google.common.css.SourceCode(null,
            String.format("p { background: %s; }", backgroundValue)))
        .parse();
    assertWithMessage(
            "This test assumes we start with a stylesheet containing detectable "
                + "function nodes.")
        .that(FunctionDetector.detect(t))
        .isTrue();
    final DefaultVisitController vc =
        new DefaultVisitController(t, true /* allowMutating */);
    CssTreeVisitor functionRemover =
        new DefaultTreeVisitor() {
          @Override
          public boolean enterFunctionNode(CssFunctionNode node) {
            // Remove the node currently under visitation; this exercises
            // the controller's support for mutation during traversal.
            vc.removeCurrentNode();
            return true;
          }
        };
    vc.startVisit(functionRemover);
    assertWithMessage(
            "We should be able to remove function nodes that occur as property " + "values.")
        .that(FunctionDetector.detect(t))
        .isFalse();
    assertWithMessage(
            "Removing one composite element within a property value should not "
                + "affect its siblings.")
        .that(ValueDetector.detect(t, "red"))
        .isTrue();
    assertWithMessage(
            "Removing one composite element within a property value should not "
                + "affect its siblings.")
        .that(ValueDetector.detect(t, "fixed"))
        .isTrue();
  } catch (GssParserException e) {
    throw new RuntimeException(e);
  }
}
@Test
public void testRemoveCompositePropertyValueElement() {
  // Function node at the start of the composite value list.
  verifyRemoveablePropertyValueElement("url(http://www.google.com/logo), red fixed");
}
@Test
public void testRemoveCompositePropertyValueElementMiddle() {
  // Function node in the middle of the composite value list.
  verifyRemoveablePropertyValueElement("red, url(http://www.google.com/logo), fixed");
}
@Test
public void testRemoveCompositePropertyValueElementEnd() {
  // Function node at the end of the composite value list.
  verifyRemoveablePropertyValueElement("red fixed, url(http://www.google.com/logo)");
}
@Test
public void testVisitForLoop() {
  // A @for loop (from 1 to 5 step 2) wrapping a single definition.
  // Expected order: tree, import block, body block, loop node, definition.
  CssLiteralNode x = new CssLiteralNode("FOO");
  CssDefinitionNode def = new CssDefinitionNode(x);
  CssBlockNode loopBlock = new CssBlockNode(true);
  loopBlock.addChildToBack(def);
  CssValueNode from = new CssNumericNode("1", CssNumericNode.NO_UNITS);
  CssValueNode to = new CssNumericNode("5", CssNumericNode.NO_UNITS);
  CssValueNode step = new CssNumericNode("2", CssNumericNode.NO_UNITS);
  CssLiteralNode variableNode = new CssLiteralNode("for");
  CssForLoopRuleNode loop = new CssForLoopRuleNode(
      variableNode, loopBlock, null, from, to, step, "i", 0, null);
  CssBlockNode block = new CssBlockNode(false);
  block.addChildToBack(loop);
  CssRootNode root = new CssRootNode(block);
  CssTree tree = new CssTree(null, root);
  DefaultVisitController controller = new DefaultVisitController(tree, true);
  controller.startVisit(testVisitor);
  InOrder inOrder = Mockito.inOrder(testVisitor);
  // Enter Tree gets the root - there is no enterRoot.
  inOrder.verify(testVisitor).enterTree(root);
  // There are blocks that get created even if you don't add them.
  inOrder.verify(testVisitor).enterImportBlock(Matchers.<CssImportBlockNode>any());
  // Then we enter the block.
  inOrder.verify(testVisitor).enterBlock(block);
  // Then we enter the for loop node.
  inOrder.verify(testVisitor).enterForLoop(loop);
  // Then we enter the definition within the for loop.
  inOrder.verify(testVisitor).enterDefinition(def);
}
/**
 * Replacing "bar" inside the composite (foo, bar) with the composite
 * (baz, quux) must splice the replacement's children into the outer
 * composite and reparent all of them onto it.
 *
 * <p>Fix: the original checked {@code getValues().get(1)} twice, so the
 * parent of "quux" (index 2, per the {@code "foo,baz,quux"} assertion) was
 * never actually verified.
 */
@Test
public void testCssCompositeValueNodeBecomesParentForNewChildren() {
  CssLiteralNode foo = new CssLiteralNode("foo");
  CssLiteralNode bar = new CssLiteralNode("bar");
  CssCompositeValueNode composite =
      new CssCompositeValueNode(ImmutableList.<CssValueNode>of(foo, bar), Operator.COMMA, null);
  final MutatingVisitController controller = new DefaultVisitController(composite, true);
  controller.startVisit(
      new DefaultTreeVisitor() {
        @Override
        public boolean enterValueNode(CssValueNode value) {
          if (value.getValue().equals("bar")) {
            CssLiteralNode baz = new CssLiteralNode("baz");
            CssLiteralNode quux = new CssLiteralNode("quux");
            CssCompositeValueNode newNode =
                new CssCompositeValueNode(
                    ImmutableList.<CssValueNode>of(baz, quux), Operator.COMMA, null);
            controller.replaceCurrentBlockChildWith(ImmutableList.of(newNode), false);
          }
          return true;
        }
      });
  assertThat(composite.toString()).isEqualTo("foo,baz,quux");
  CssValueNode fooValue = composite.getValues().get(0);
  assertThat(fooValue.getParent()).isSameAs(composite);
  CssValueNode bazValue = composite.getValues().get(1);
  assertThat(bazValue.getParent()).isSameAs(composite);
  CssValueNode quuxValue = composite.getValues().get(2);
  assertThat(quuxValue.getParent()).isSameAs(composite);
}
/**
 * Test helper: reports whether any value node in a tree carries a given
 * string value. Fix: replaced the one-element {@code boolean[]} capture
 * hack with a plain private field — unnecessary for a named class that owns
 * its own state.
 */
private static class ValueDetector extends DefaultTreeVisitor {
  private final String quarry;
  private boolean found;

  private ValueDetector(String quarry) {
    this.quarry = quarry;
  }

  /** Returns true if some value node in {@code t} equals {@code quarry}. */
  public static boolean detect(CssTree t, String quarry) {
    ValueDetector detector = new ValueDetector(quarry);
    new DefaultVisitController(t, false /* allowMutating */).startVisit(detector);
    return detector.found;
  }

  @Override
  public boolean enterValueNode(CssValueNode node) {
    if (quarry.equals(node.getValue())) {
      found = true;
    }
    return true;
  }
}
/**
 * Test helper: reports whether a tree contains any function node. Fix:
 * replaced the one-element {@code boolean[]} capture hack with a plain
 * private field — unnecessary for a named class that owns its own state.
 */
private static class FunctionDetector extends DefaultTreeVisitor {
  private boolean found;

  /** Returns true if {@code t} contains at least one function node. */
  public static boolean detect(CssTree t) {
    FunctionDetector detector = new FunctionDetector();
    new DefaultVisitController(t, false /* allowMutating */).startVisit(detector);
    return detector.found;
  }

  @Override
  public boolean enterFunctionNode(CssFunctionNode node) {
    found = true;
    return true;
  }
}
}
|
googleapis/google-cloud-java | 36,924 | java-shopping-merchant-reports/proto-google-shopping-merchant-reports-v1beta/src/main/java/com/google/shopping/merchant/reports/v1beta/SearchResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/shopping/merchant/reports/v1beta/reports.proto
// Protobuf Java Version: 3.25.8
package com.google.shopping.merchant.reports.v1beta;
/**
*
*
* <pre>
* Response message for the `ReportService.Search` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.reports.v1beta.SearchResponse}
*/
public final class SearchResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.shopping.merchant.reports.v1beta.SearchResponse)
SearchResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use SearchResponse.newBuilder() to construct.
// Constructor used when building from a Builder (see newBuilder()).
private SearchResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default-instance constructor: no result rows, empty page token.
private SearchResponse() {
  results_ = java.util.Collections.emptyList();
  nextPageToken_ = "";
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  // Instantiation hook invoked by the protobuf runtime.
  return new SearchResponse();
}
// Returns the message descriptor declared in the generated file descriptor.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.shopping.merchant.reports.v1beta.ReportsProto
      .internal_static_google_shopping_merchant_reports_v1beta_SearchResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  // Binds this class and its Builder to the reflective field accessors.
  return com.google.shopping.merchant.reports.v1beta.ReportsProto
      .internal_static_google_shopping_merchant_reports_v1beta_SearchResponse_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.shopping.merchant.reports.v1beta.SearchResponse.class,
          com.google.shopping.merchant.reports.v1beta.SearchResponse.Builder.class);
}
public static final int RESULTS_FIELD_NUMBER = 1;

// Backing storage for the repeated `results` field (field number 1).
@SuppressWarnings("serial")
private java.util.List<com.google.shopping.merchant.reports.v1beta.ReportRow> results_;

/**
 *
 *
 * <pre>
 * Rows that matched the search query.
 * </pre>
 *
 * <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
 */
@java.lang.Override
public java.util.List<com.google.shopping.merchant.reports.v1beta.ReportRow> getResultsList() {
  return results_;
}
/**
 *
 *
 * <pre>
 * Rows that matched the search query.
 * </pre>
 *
 * <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
 */
@java.lang.Override
public java.util.List<? extends com.google.shopping.merchant.reports.v1beta.ReportRowOrBuilder>
    getResultsOrBuilderList() {
  // Same backing list, exposed through the read-only OrBuilder view.
  return results_;
}
/**
 *
 *
 * <pre>
 * Rows that matched the search query.
 * </pre>
 *
 * <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
 */
@java.lang.Override
public int getResultsCount() {
  // Number of rows in the repeated `results` field.
  return results_.size();
}
/**
 *
 *
 * <pre>
 * Rows that matched the search query.
 * </pre>
 *
 * <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
 */
@java.lang.Override
public com.google.shopping.merchant.reports.v1beta.ReportRow getResults(int index) {
  // Direct index into the backing list; protobuf callers index within bounds.
  return results_.get(index);
}
/**
 *
 *
 * <pre>
 * Rows that matched the search query.
 * </pre>
 *
 * <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
 */
@java.lang.Override
public com.google.shopping.merchant.reports.v1beta.ReportRowOrBuilder getResultsOrBuilder(
    int index) {
  // Built messages implement their OrBuilder interface directly.
  return results_.get(index);
}
public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;

// Holds either a java.lang.String or a ByteString; the String form is
// decoded lazily and cached on first access (standard generated pattern).
@SuppressWarnings("serial")
private volatile java.lang.Object nextPageToken_ = "";

/**
 *
 *
 * <pre>
 * Token which can be sent as `page_token` to retrieve the next page. If
 * omitted, there are no subsequent pages.
 * </pre>
 *
 * <code>string next_page_token = 2;</code>
 *
 * @return The nextPageToken.
 */
@java.lang.Override
public java.lang.String getNextPageToken() {
  java.lang.Object ref = nextPageToken_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // Decode the UTF-8 bytes once and cache the String for later calls.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    nextPageToken_ = s;
    return s;
  }
}
/**
*
*
* <pre>
* Token which can be sent as `page_token` to retrieve the next page. If
* omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
@java.lang.Override
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized initialization state: -1 = not yet computed, 0 = not initialized, 1 = initialized.
private byte memoizedIsInitialized = -1;

/**
 * Returns whether all required fields are set. SearchResponse declares no
 * required (proto2-style) fields, so this always memoizes and returns true.
 */
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// No required fields to validate; cache the positive result.
memoizedIsInitialized = 1;
return true;
}
/**
 * Serializes this message to the protobuf wire format, emitting fields in
 * ascending field-number order: results (1), next_page_token (2), then any
 * unknown fields preserved from parsing.
 */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
// Field 1: repeated ReportRow results — one length-delimited message per element.
for (int i = 0; i < results_.size(); i++) {
output.writeMessage(1, results_.get(i));
}
// Field 2: next_page_token — omitted entirely when empty (proto3 default).
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
}
// Round-trip any fields this generated class does not know about.
getUnknownFields().writeTo(output);
}
/**
 * Computes (and memoizes in {@code memoizedSize}) the number of bytes
 * {@link #writeTo} will emit. Safe because the message is immutable once
 * built; -1 marks "not yet computed".
 */
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
// Field 1: sum of tag + length + payload for each results element.
for (int i = 0; i < results_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, results_.get(i));
}
// Field 2: only counted when non-empty, mirroring writeTo's skip of defaults.
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
/**
 * Field-by-field structural equality: results list, next_page_token, and the
 * preserved unknown fields must all match. Non-SearchResponse arguments fall
 * back to the superclass comparison.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.shopping.merchant.reports.v1beta.SearchResponse)) {
return super.equals(obj);
}
com.google.shopping.merchant.reports.v1beta.SearchResponse other =
(com.google.shopping.merchant.reports.v1beta.SearchResponse) obj;
if (!getResultsList().equals(other.getResultsList())) return false;
if (!getNextPageToken().equals(other.getNextPageToken())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
/**
 * Memoized hash consistent with {@link #equals}: mixes the descriptor, each
 * set field tagged by its field number, and the unknown fields. Memoization
 * relies on built messages being immutable (0 doubles as "not yet computed").
 */
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
// Repeated field contributes only when non-empty, matching equals semantics.
if (getResultsCount() > 0) {
hash = (37 * hash) + RESULTS_FIELD_NUMBER;
hash = (53 * hash) + getResultsList().hashCode();
}
hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
hash = (53 * hash) + getNextPageToken().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.shopping.merchant.reports.v1beta.SearchResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.reports.v1beta.SearchResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.reports.v1beta.SearchResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.reports.v1beta.SearchResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.reports.v1beta.SearchResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.shopping.merchant.reports.v1beta.SearchResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.shopping.merchant.reports.v1beta.SearchResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.reports.v1beta.SearchResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.reports.v1beta.SearchResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.reports.v1beta.SearchResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.shopping.merchant.reports.v1beta.SearchResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.shopping.merchant.reports.v1beta.SearchResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.shopping.merchant.reports.v1beta.SearchResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for the `ReportService.Search` method.
* </pre>
*
* Protobuf type {@code google.shopping.merchant.reports.v1beta.SearchResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.shopping.merchant.reports.v1beta.SearchResponse)
com.google.shopping.merchant.reports.v1beta.SearchResponseOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.shopping.merchant.reports.v1beta.ReportsProto
.internal_static_google_shopping_merchant_reports_v1beta_SearchResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.shopping.merchant.reports.v1beta.ReportsProto
.internal_static_google_shopping_merchant_reports_v1beta_SearchResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.shopping.merchant.reports.v1beta.SearchResponse.class,
com.google.shopping.merchant.reports.v1beta.SearchResponse.Builder.class);
}
// Construct using com.google.shopping.merchant.reports.v1beta.SearchResponse.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
/**
 * Resets the builder to default values: empty results, empty
 * next_page_token, and all presence bits cleared.
 */
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (resultsBuilder_ == null) {
// Inline-list mode: drop back to the shared immutable empty list.
results_ = java.util.Collections.emptyList();
} else {
// Field-builder mode: the builder owns the elements; null the local list.
results_ = null;
resultsBuilder_.clear();
}
// Clear the results presence bit (redundant with the wholesale reset above,
// but emitted per-field by the generator).
bitField0_ = (bitField0_ & ~0x00000001);
nextPageToken_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.shopping.merchant.reports.v1beta.ReportsProto
.internal_static_google_shopping_merchant_reports_v1beta_SearchResponse_descriptor;
}
@java.lang.Override
public com.google.shopping.merchant.reports.v1beta.SearchResponse getDefaultInstanceForType() {
return com.google.shopping.merchant.reports.v1beta.SearchResponse.getDefaultInstance();
}
@java.lang.Override
public com.google.shopping.merchant.reports.v1beta.SearchResponse build() {
com.google.shopping.merchant.reports.v1beta.SearchResponse result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
/**
 * Builds the message without checking required fields: first transfers the
 * repeated field, then copies scalar fields whose presence bits are set.
 */
@java.lang.Override
public com.google.shopping.merchant.reports.v1beta.SearchResponse buildPartial() {
com.google.shopping.merchant.reports.v1beta.SearchResponse result =
new com.google.shopping.merchant.reports.v1beta.SearchResponse(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}

// Moves the results list into the message. In inline mode the list is frozen
// (wrapped unmodifiable) and the presence bit cleared so a later mutation on
// this builder triggers a defensive copy; in builder mode ownership transfers
// via resultsBuilder_.build().
private void buildPartialRepeatedFields(
com.google.shopping.merchant.reports.v1beta.SearchResponse result) {
if (resultsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
results_ = java.util.Collections.unmodifiableList(results_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.results_ = results_;
} else {
result.results_ = resultsBuilder_.build();
}
}

// Copies scalar fields guarded by their presence bits (0x2 = next_page_token).
private void buildPartial0(com.google.shopping.merchant.reports.v1beta.SearchResponse result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.nextPageToken_ = nextPageToken_;
}
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.shopping.merchant.reports.v1beta.SearchResponse) {
return mergeFrom((com.google.shopping.merchant.reports.v1beta.SearchResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.shopping.merchant.reports.v1beta.SearchResponse other) {
if (other == com.google.shopping.merchant.reports.v1beta.SearchResponse.getDefaultInstance())
return this;
if (resultsBuilder_ == null) {
if (!other.results_.isEmpty()) {
if (results_.isEmpty()) {
results_ = other.results_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureResultsIsMutable();
results_.addAll(other.results_);
}
onChanged();
}
} else {
if (!other.results_.isEmpty()) {
if (resultsBuilder_.isEmpty()) {
resultsBuilder_.dispose();
resultsBuilder_ = null;
results_ = other.results_;
bitField0_ = (bitField0_ & ~0x00000001);
resultsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getResultsFieldBuilder()
: null;
} else {
resultsBuilder_.addAllMessages(other.results_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
/**
 * Parses wire-format data from {@code input} and merges it into this builder.
 * Dispatches on the wire tag (field_number << 3 | wire_type): tag 10 is
 * field 1 (results, length-delimited), tag 18 is field 2 (next_page_token,
 * length-delimited), tag 0 means end of stream. Unrecognized tags are routed
 * to the unknown-field set.
 */
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
// Field 1: one ReportRow element; appended to whichever storage
// (inline list or field builder) is currently active.
com.google.shopping.merchant.reports.v1beta.ReportRow m =
input.readMessage(
com.google.shopping.merchant.reports.v1beta.ReportRow.parser(),
extensionRegistry);
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(m);
} else {
resultsBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
// Field 2: UTF-8-validated string; mark its presence bit.
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
// Notify parents even on failure so partial merges propagate.
onChanged();
} // finally
return this;
}
// Presence bits: 0x1 = results list is a private mutable copy, 0x2 = next_page_token set.
private int bitField0_;

// Inline storage for results; starts as the shared immutable empty list and is
// only used while resultsBuilder_ is null.
private java.util.List<com.google.shopping.merchant.reports.v1beta.ReportRow> results_ =
java.util.Collections.emptyList();

// Copy-on-write guard: before any mutation, replace a shared/immutable list
// with a private ArrayList copy and set bit 0x1 to record ownership.
private void ensureResultsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
results_ =
new java.util.ArrayList<com.google.shopping.merchant.reports.v1beta.ReportRow>(
results_);
bitField0_ |= 0x00000001;
}
}

// Lazily-created field builder for results; once non-null it owns the
// elements and results_ is ignored (see getResultsFieldBuilder()).
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.merchant.reports.v1beta.ReportRow,
com.google.shopping.merchant.reports.v1beta.ReportRow.Builder,
com.google.shopping.merchant.reports.v1beta.ReportRowOrBuilder>
resultsBuilder_;
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public java.util.List<com.google.shopping.merchant.reports.v1beta.ReportRow> getResultsList() {
if (resultsBuilder_ == null) {
return java.util.Collections.unmodifiableList(results_);
} else {
return resultsBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public int getResultsCount() {
if (resultsBuilder_ == null) {
return results_.size();
} else {
return resultsBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public com.google.shopping.merchant.reports.v1beta.ReportRow getResults(int index) {
if (resultsBuilder_ == null) {
return results_.get(index);
} else {
return resultsBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public Builder setResults(
int index, com.google.shopping.merchant.reports.v1beta.ReportRow value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.set(index, value);
onChanged();
} else {
resultsBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public Builder setResults(
int index, com.google.shopping.merchant.reports.v1beta.ReportRow.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.set(index, builderForValue.build());
onChanged();
} else {
resultsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public Builder addResults(com.google.shopping.merchant.reports.v1beta.ReportRow value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.add(value);
onChanged();
} else {
resultsBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public Builder addResults(
int index, com.google.shopping.merchant.reports.v1beta.ReportRow value) {
if (resultsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureResultsIsMutable();
results_.add(index, value);
onChanged();
} else {
resultsBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public Builder addResults(
com.google.shopping.merchant.reports.v1beta.ReportRow.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(builderForValue.build());
onChanged();
} else {
resultsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public Builder addResults(
int index, com.google.shopping.merchant.reports.v1beta.ReportRow.Builder builderForValue) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.add(index, builderForValue.build());
onChanged();
} else {
resultsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public Builder addAllResults(
java.lang.Iterable<? extends com.google.shopping.merchant.reports.v1beta.ReportRow>
values) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, results_);
onChanged();
} else {
resultsBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public Builder clearResults() {
if (resultsBuilder_ == null) {
results_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
resultsBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public Builder removeResults(int index) {
if (resultsBuilder_ == null) {
ensureResultsIsMutable();
results_.remove(index);
onChanged();
} else {
resultsBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public com.google.shopping.merchant.reports.v1beta.ReportRow.Builder getResultsBuilder(
int index) {
return getResultsFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public com.google.shopping.merchant.reports.v1beta.ReportRowOrBuilder getResultsOrBuilder(
int index) {
if (resultsBuilder_ == null) {
return results_.get(index);
} else {
return resultsBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public java.util.List<? extends com.google.shopping.merchant.reports.v1beta.ReportRowOrBuilder>
getResultsOrBuilderList() {
if (resultsBuilder_ != null) {
return resultsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(results_);
}
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public com.google.shopping.merchant.reports.v1beta.ReportRow.Builder addResultsBuilder() {
return getResultsFieldBuilder()
.addBuilder(com.google.shopping.merchant.reports.v1beta.ReportRow.getDefaultInstance());
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public com.google.shopping.merchant.reports.v1beta.ReportRow.Builder addResultsBuilder(
int index) {
return getResultsFieldBuilder()
.addBuilder(
index, com.google.shopping.merchant.reports.v1beta.ReportRow.getDefaultInstance());
}
/**
*
*
* <pre>
* Rows that matched the search query.
* </pre>
*
* <code>repeated .google.shopping.merchant.reports.v1beta.ReportRow results = 1;</code>
*/
public java.util.List<com.google.shopping.merchant.reports.v1beta.ReportRow.Builder>
getResultsBuilderList() {
return getResultsFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.merchant.reports.v1beta.ReportRow,
com.google.shopping.merchant.reports.v1beta.ReportRow.Builder,
com.google.shopping.merchant.reports.v1beta.ReportRowOrBuilder>
getResultsFieldBuilder() {
if (resultsBuilder_ == null) {
resultsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.shopping.merchant.reports.v1beta.ReportRow,
com.google.shopping.merchant.reports.v1beta.ReportRow.Builder,
com.google.shopping.merchant.reports.v1beta.ReportRowOrBuilder>(
results_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
results_ = null;
}
return resultsBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* Token which can be sent as `page_token` to retrieve the next page. If
* omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* Token which can be sent as `page_token` to retrieve the next page. If
* omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* Token which can be sent as `page_token` to retrieve the next page. If
* omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Token which can be sent as `page_token` to retrieve the next page. If
* omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* Token which can be sent as `page_token` to retrieve the next page. If
* omitted, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.shopping.merchant.reports.v1beta.SearchResponse)
}
// @@protoc_insertion_point(class_scope:google.shopping.merchant.reports.v1beta.SearchResponse)
// Singleton default (all-fields-default) instance, created eagerly at class load.
private static final com.google.shopping.merchant.reports.v1beta.SearchResponse DEFAULT_INSTANCE;

static {
DEFAULT_INSTANCE = new com.google.shopping.merchant.reports.v1beta.SearchResponse();
}

public static com.google.shopping.merchant.reports.v1beta.SearchResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}

// Stateless parser: delegates to a fresh Builder's mergeFrom and converts
// checked I/O / uninitialized-message failures into
// InvalidProtocolBufferException, attaching the partially-built message.
private static final com.google.protobuf.Parser<SearchResponse> PARSER =
new com.google.protobuf.AbstractParser<SearchResponse>() {
@java.lang.Override
public SearchResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};

public static com.google.protobuf.Parser<SearchResponse> parser() {
return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<SearchResponse> getParserForType() {
return PARSER;
}

@java.lang.Override
public com.google.shopping.merchant.reports.v1beta.SearchResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 36,927 | java-functions/proto-google-cloud-functions-v2beta/src/main/java/com/google/cloud/functions/v2beta/CreateFunctionRequest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/functions/v2beta/functions.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.functions.v2beta;
/**
*
*
* <pre>
* Request for the `CreateFunction` method.
* </pre>
*
* Protobuf type {@code google.cloud.functions.v2beta.CreateFunctionRequest}
*/
public final class CreateFunctionRequest extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.functions.v2beta.CreateFunctionRequest)
CreateFunctionRequestOrBuilder {
private static final long serialVersionUID = 0L;
// Use CreateFunctionRequest.newBuilder() to construct.
// Use CreateFunctionRequest.newBuilder() to construct.
private CreateFunctionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}

// No-arg constructor used for the default instance; string fields start empty.
private CreateFunctionRequest() {
parent_ = "";
functionId_ = "";
}

// Reflection hook used by the protobuf runtime to create fresh instances
// (e.g. during mutable-copy operations); the parameter exists only to give
// this overload a distinct signature.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new CreateFunctionRequest();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.functions.v2beta.FunctionsProto
.internal_static_google_cloud_functions_v2beta_CreateFunctionRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.functions.v2beta.FunctionsProto
.internal_static_google_cloud_functions_v2beta_CreateFunctionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.functions.v2beta.CreateFunctionRequest.class,
com.google.cloud.functions.v2beta.CreateFunctionRequest.Builder.class);
}
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";
/**
*
*
* <pre>
* Required. The project and location in which the function should be created,
* specified in the format `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The parent.
*/
@java.lang.Override
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
}
}
/**
*
*
* <pre>
* Required. The project and location in which the function should be created,
* specified in the format `projects/*/locations/*`
* </pre>
*
* <code>
* string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
* </code>
*
* @return The bytes for parent.
*/
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof java.lang.String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
public static final int FUNCTION_FIELD_NUMBER = 2;
private com.google.cloud.functions.v2beta.Function function_;
/**
*
*
* <pre>
* Required. Function to be created.
* </pre>
*
* <code>
* .google.cloud.functions.v2beta.Function function = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return Whether the function field is set.
*/
@java.lang.Override
public boolean hasFunction() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Required. Function to be created.
* </pre>
*
* <code>
* .google.cloud.functions.v2beta.Function function = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*
* @return The function.
*/
@java.lang.Override
public com.google.cloud.functions.v2beta.Function getFunction() {
return function_ == null
? com.google.cloud.functions.v2beta.Function.getDefaultInstance()
: function_;
}
/**
*
*
* <pre>
* Required. Function to be created.
* </pre>
*
* <code>
* .google.cloud.functions.v2beta.Function function = 2 [(.google.api.field_behavior) = REQUIRED];
* </code>
*/
@java.lang.Override
public com.google.cloud.functions.v2beta.FunctionOrBuilder getFunctionOrBuilder() {
return function_ == null
? com.google.cloud.functions.v2beta.Function.getDefaultInstance()
: function_;
}
public static final int FUNCTION_ID_FIELD_NUMBER = 3;
@SuppressWarnings("serial")
// Holds either a java.lang.String or a ByteString; converted lazily in
// whichever direction is requested first and then cached (volatile for
// safe publication of the cached conversion).
private volatile java.lang.Object functionId_ = "";
/**
 *
 *
 * <pre>
 * The ID to use for the function, which will become the final component of
 * the function's resource name.
 *
 * This value should be 4-63 characters, and valid characters
 * are /[a-z][0-9]-/.
 * </pre>
 *
 * <code>string function_id = 3;</code>
 *
 * @return The functionId.
 */
@java.lang.Override
public java.lang.String getFunctionId() {
java.lang.Object ref = functionId_;
if (ref instanceof java.lang.String) {
return (java.lang.String) ref;
} else {
// Parsed form is a ByteString; decode once and memoize the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
functionId_ = s;
return s;
}
}
/**
 *
 *
 * <pre>
 * The ID to use for the function, which will become the final component of
 * the function's resource name.
 *
 * This value should be 4-63 characters, and valid characters
 * are /[a-z][0-9]-/.
 * </pre>
 *
 * <code>string function_id = 3;</code>
 *
 * @return The bytes for functionId.
 */
@java.lang.Override
public com.google.protobuf.ByteString getFunctionIdBytes() {
java.lang.Object ref = functionId_;
if (ref instanceof java.lang.String) {
// Encode the cached String once and memoize the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
functionId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
// Memoized result of isInitialized(): -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
// Proto3 message with no proto2-style required fields: always initialized.
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
// Serializes set fields in ascending field-number order (1, 2, 3), then any
// unknown fields preserved from parsing. Empty strings and an unset message
// field are omitted, per proto3 wire-format rules.
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(2, getFunction());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(functionId_)) {
com.google.protobuf.GeneratedMessageV3.writeString(output, 3, functionId_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
// Computes (and memoizes in memoizedSize) the exact number of bytes writeTo
// will emit; must mirror writeTo's field-presence checks one-for-one.
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getFunction());
}
if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(functionId_)) {
size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, functionId_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
// Field-by-field value equality: parent, function (presence then value),
// functionId, and the preserved unknown fields must all match.
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.functions.v2beta.CreateFunctionRequest)) {
return super.equals(obj);
}
com.google.cloud.functions.v2beta.CreateFunctionRequest other =
(com.google.cloud.functions.v2beta.CreateFunctionRequest) obj;
if (!getParent().equals(other.getParent())) return false;
if (hasFunction() != other.hasFunction()) return false;
if (hasFunction()) {
if (!getFunction().equals(other.getFunction())) return false;
}
if (!getFunctionId().equals(other.getFunctionId())) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
// Memoized hash consistent with equals(): mixes the descriptor, each set
// field tagged by its field number, and the unknown-field set. The 19/37/53/29
// multipliers are fixed by the protobuf code generator.
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + PARENT_FIELD_NUMBER;
hash = (53 * hash) + getParent().hashCode();
if (hasFunction()) {
hash = (37 * hash) + FUNCTION_FIELD_NUMBER;
hash = (53 * hash) + getFunction().hashCode();
}
hash = (37 * hash) + FUNCTION_ID_FIELD_NUMBER;
hash = (53 * hash) + getFunctionId().hashCode();
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. The byte-oriented overloads
// (ByteBuffer/ByteString/byte[]) delegate directly to PARSER; the
// stream-oriented ones go through GeneratedMessageV3 helpers that convert
// IOExceptions into the appropriate parse failures. Each has a variant
// accepting an ExtensionRegistryLite for resolving extensions.
public static com.google.cloud.functions.v2beta.CreateFunctionRequest parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.functions.v2beta.CreateFunctionRequest parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.functions.v2beta.CreateFunctionRequest parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.functions.v2beta.CreateFunctionRequest parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.functions.v2beta.CreateFunctionRequest parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.functions.v2beta.CreateFunctionRequest parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.functions.v2beta.CreateFunctionRequest parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.functions.v2beta.CreateFunctionRequest parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message payload,
// allowing multiple messages on one stream.
public static com.google.cloud.functions.v2beta.CreateFunctionRequest parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.functions.v2beta.CreateFunctionRequest parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.functions.v2beta.CreateFunctionRequest parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.functions.v2beta.CreateFunctionRequest parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Fresh builder with all fields at their defaults.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated from an existing message.
public static Builder newBuilder(
com.google.cloud.functions.v2beta.CreateFunctionRequest prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty builder; any other instance is
// copied into the new builder via mergeFrom.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * Request for the `CreateFunction` method.
 * </pre>
 *
 * Protobuf type {@code google.cloud.functions.v2beta.CreateFunctionRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.functions.v2beta.CreateFunctionRequest)
com.google.cloud.functions.v2beta.CreateFunctionRequestOrBuilder {
// Builder-side bitField0_ layout differs from the message's:
//   0x00000001 = parent set, 0x00000002 = function set, 0x00000004 = functionId set.
// buildPartial0 translates these into the message's single presence bit.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.functions.v2beta.FunctionsProto
.internal_static_google_cloud_functions_v2beta_CreateFunctionRequest_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.functions.v2beta.FunctionsProto
.internal_static_google_cloud_functions_v2beta_CreateFunctionRequest_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.functions.v2beta.CreateFunctionRequest.class,
com.google.cloud.functions.v2beta.CreateFunctionRequest.Builder.class);
}
// Construct using com.google.cloud.functions.v2beta.CreateFunctionRequest.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
// Eagerly create nested field builders only when the runtime requires it
// (alwaysUseFieldBuilders is a protobuf-internal test/debug flag).
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getFunctionFieldBuilder();
}
}
@java.lang.Override
// Resets every field to its default and clears all presence bits.
public Builder clear() {
super.clear();
bitField0_ = 0;
parent_ = "";
function_ = null;
if (functionBuilder_ != null) {
functionBuilder_.dispose();
functionBuilder_ = null;
}
functionId_ = "";
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.functions.v2beta.FunctionsProto
.internal_static_google_cloud_functions_v2beta_CreateFunctionRequest_descriptor;
}
@java.lang.Override
public com.google.cloud.functions.v2beta.CreateFunctionRequest getDefaultInstanceForType() {
return com.google.cloud.functions.v2beta.CreateFunctionRequest.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.functions.v2beta.CreateFunctionRequest build() {
com.google.cloud.functions.v2beta.CreateFunctionRequest result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.functions.v2beta.CreateFunctionRequest buildPartial() {
com.google.cloud.functions.v2beta.CreateFunctionRequest result =
new com.google.cloud.functions.v2beta.CreateFunctionRequest(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Copies set fields from the builder into the new message, remapping the
// builder's function presence bit (0x00000002) to the message's 0x00000001.
private void buildPartial0(com.google.cloud.functions.v2beta.CreateFunctionRequest result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.parent_ = parent_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.function_ = functionBuilder_ == null ? function_ : functionBuilder_.build();
to_bitField0_ |= 0x00000001;
}
if (((from_bitField0_ & 0x00000004) != 0)) {
result.functionId_ = functionId_;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.functions.v2beta.CreateFunctionRequest) {
return mergeFrom((com.google.cloud.functions.v2beta.CreateFunctionRequest) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Standard protobuf merge semantics: set scalar/string fields from `other`
// only when non-empty there; sub-messages are merged recursively.
public Builder mergeFrom(com.google.cloud.functions.v2beta.CreateFunctionRequest other) {
if (other == com.google.cloud.functions.v2beta.CreateFunctionRequest.getDefaultInstance())
return this;
if (!other.getParent().isEmpty()) {
parent_ = other.parent_;
bitField0_ |= 0x00000001;
onChanged();
}
if (other.hasFunction()) {
mergeFunction(other.getFunction());
}
if (!other.getFunctionId().isEmpty()) {
functionId_ = other.functionId_;
bitField0_ |= 0x00000004;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
// Streaming merge: dispatches on wire tags (10 = field 1 string,
// 18 = field 2 message, 26 = field 3 string); unrecognized tags are kept
// as unknown fields, and tag 0 / an end-group marker terminates the loop.
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
parent_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000001;
break;
} // case 10
case 18:
{
input.readMessage(getFunctionFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
case 26:
{
functionId_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000004;
break;
} // case 26
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.lang.Object parent_ = "";
/**
 *
 *
 * <pre>
 * Required. The project and location in which the function should be created,
 * specified in the format `projects/*/locations/*`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
public java.lang.String getParent() {
java.lang.Object ref = parent_;
if (!(ref instanceof java.lang.String)) {
// Lazily decode the parsed ByteString form and cache the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
parent_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * Required. The project and location in which the function should be created,
 * specified in the format `projects/*/locations/*`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
public com.google.protobuf.ByteString getParentBytes() {
java.lang.Object ref = parent_;
if (ref instanceof String) {
// Lazily encode the cached String and memoize the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
parent_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * Required. The project and location in which the function should be created,
 * specified in the format `projects/*/locations/*`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The parent to set.
 * @return This builder for chaining.
 */
public Builder setParent(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. The project and location in which the function should be created,
 * specified in the format `projects/*/locations/*`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return This builder for chaining.
 */
public Builder clearParent() {
parent_ = getDefaultInstance().getParent();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. The project and location in which the function should be created,
 * specified in the format `projects/*/locations/*`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @param value The bytes for parent to set.
 * @return This builder for chaining.
 */
public Builder setParentBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Rejects bytes that are not valid UTF-8 (proto3 string fields must be UTF-8).
checkByteStringIsUtf8(value);
parent_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
// function_ is authoritative only while functionBuilder_ is null; once the
// nested builder is created, it owns the value (see getFunctionFieldBuilder).
private com.google.cloud.functions.v2beta.Function function_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.functions.v2beta.Function,
com.google.cloud.functions.v2beta.Function.Builder,
com.google.cloud.functions.v2beta.FunctionOrBuilder>
functionBuilder_;
/**
 *
 *
 * <pre>
 * Required. Function to be created.
 * </pre>
 *
 * <code>
 * .google.cloud.functions.v2beta.Function function = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the function field is set.
 */
public boolean hasFunction() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
 *
 *
 * <pre>
 * Required. Function to be created.
 * </pre>
 *
 * <code>
 * .google.cloud.functions.v2beta.Function function = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The function.
 */
public com.google.cloud.functions.v2beta.Function getFunction() {
if (functionBuilder_ == null) {
return function_ == null
? com.google.cloud.functions.v2beta.Function.getDefaultInstance()
: function_;
} else {
return functionBuilder_.getMessage();
}
}
/**
 *
 *
 * <pre>
 * Required. Function to be created.
 * </pre>
 *
 * <code>
 * .google.cloud.functions.v2beta.Function function = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setFunction(com.google.cloud.functions.v2beta.Function value) {
if (functionBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
function_ = value;
} else {
functionBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. Function to be created.
 * </pre>
 *
 * <code>
 * .google.cloud.functions.v2beta.Function function = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder setFunction(com.google.cloud.functions.v2beta.Function.Builder builderForValue) {
if (functionBuilder_ == null) {
function_ = builderForValue.build();
} else {
functionBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. Function to be created.
 * </pre>
 *
 * <code>
 * .google.cloud.functions.v2beta.Function function = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder mergeFunction(com.google.cloud.functions.v2beta.Function value) {
if (functionBuilder_ == null) {
// Merge into the existing value only if one was already set and is not
// the shared default instance; otherwise adopt `value` directly.
if (((bitField0_ & 0x00000002) != 0)
&& function_ != null
&& function_ != com.google.cloud.functions.v2beta.Function.getDefaultInstance()) {
getFunctionBuilder().mergeFrom(value);
} else {
function_ = value;
}
} else {
functionBuilder_.mergeFrom(value);
}
if (function_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
 *
 *
 * <pre>
 * Required. Function to be created.
 * </pre>
 *
 * <code>
 * .google.cloud.functions.v2beta.Function function = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public Builder clearFunction() {
bitField0_ = (bitField0_ & ~0x00000002);
function_ = null;
if (functionBuilder_ != null) {
functionBuilder_.dispose();
functionBuilder_ = null;
}
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. Function to be created.
 * </pre>
 *
 * <code>
 * .google.cloud.functions.v2beta.Function function = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.functions.v2beta.Function.Builder getFunctionBuilder() {
// Marks the field as set: exposing the nested builder implies mutation.
bitField0_ |= 0x00000002;
onChanged();
return getFunctionFieldBuilder().getBuilder();
}
/**
 *
 *
 * <pre>
 * Required. Function to be created.
 * </pre>
 *
 * <code>
 * .google.cloud.functions.v2beta.Function function = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
public com.google.cloud.functions.v2beta.FunctionOrBuilder getFunctionOrBuilder() {
if (functionBuilder_ != null) {
return functionBuilder_.getMessageOrBuilder();
} else {
return function_ == null
? com.google.cloud.functions.v2beta.Function.getDefaultInstance()
: function_;
}
}
/**
 *
 *
 * <pre>
 * Required. Function to be created.
 * </pre>
 *
 * <code>
 * .google.cloud.functions.v2beta.Function function = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.functions.v2beta.Function,
com.google.cloud.functions.v2beta.Function.Builder,
com.google.cloud.functions.v2beta.FunctionOrBuilder>
getFunctionFieldBuilder() {
if (functionBuilder_ == null) {
// Transfer ownership of the current value into the nested builder and
// null out function_ so there is a single source of truth.
functionBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.functions.v2beta.Function,
com.google.cloud.functions.v2beta.Function.Builder,
com.google.cloud.functions.v2beta.FunctionOrBuilder>(
getFunction(), getParentForChildren(), isClean());
function_ = null;
}
return functionBuilder_;
}
private java.lang.Object functionId_ = "";
/**
 *
 *
 * <pre>
 * The ID to use for the function, which will become the final component of
 * the function's resource name.
 *
 * This value should be 4-63 characters, and valid characters
 * are /[a-z][0-9]-/.
 * </pre>
 *
 * <code>string function_id = 3;</code>
 *
 * @return The functionId.
 */
public java.lang.String getFunctionId() {
java.lang.Object ref = functionId_;
if (!(ref instanceof java.lang.String)) {
// Lazily decode the parsed ByteString form and cache the String.
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
functionId_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
 *
 *
 * <pre>
 * The ID to use for the function, which will become the final component of
 * the function's resource name.
 *
 * This value should be 4-63 characters, and valid characters
 * are /[a-z][0-9]-/.
 * </pre>
 *
 * <code>string function_id = 3;</code>
 *
 * @return The bytes for functionId.
 */
public com.google.protobuf.ByteString getFunctionIdBytes() {
java.lang.Object ref = functionId_;
if (ref instanceof String) {
// Lazily encode the cached String and memoize the ByteString form.
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
functionId_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
 *
 *
 * <pre>
 * The ID to use for the function, which will become the final component of
 * the function's resource name.
 *
 * This value should be 4-63 characters, and valid characters
 * are /[a-z][0-9]-/.
 * </pre>
 *
 * <code>string function_id = 3;</code>
 *
 * @param value The functionId to set.
 * @return This builder for chaining.
 */
public Builder setFunctionId(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
functionId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The ID to use for the function, which will become the final component of
 * the function's resource name.
 *
 * This value should be 4-63 characters, and valid characters
 * are /[a-z][0-9]-/.
 * </pre>
 *
 * <code>string function_id = 3;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearFunctionId() {
functionId_ = getDefaultInstance().getFunctionId();
bitField0_ = (bitField0_ & ~0x00000004);
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * The ID to use for the function, which will become the final component of
 * the function's resource name.
 *
 * This value should be 4-63 characters, and valid characters
 * are /[a-z][0-9]-/.
 * </pre>
 *
 * <code>string function_id = 3;</code>
 *
 * @param value The bytes for functionId to set.
 * @return This builder for chaining.
 */
public Builder setFunctionIdBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
// Rejects bytes that are not valid UTF-8 (proto3 string fields must be UTF-8).
checkByteStringIsUtf8(value);
functionId_ = value;
bitField0_ |= 0x00000004;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.functions.v2beta.CreateFunctionRequest)
}
// @@protoc_insertion_point(class_scope:google.cloud.functions.v2beta.CreateFunctionRequest)
// Singleton empty instance shared by all callers; also used as the sentinel
// for "no value" in message-typed getters.
private static final com.google.cloud.functions.v2beta.CreateFunctionRequest DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.functions.v2beta.CreateFunctionRequest();
}
public static com.google.cloud.functions.v2beta.CreateFunctionRequest getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser: delegates to Builder.mergeFrom and converts builder-side
// failures into InvalidProtocolBufferException, attaching the partially
// parsed message so callers can inspect what was read before the error.
private static final com.google.protobuf.Parser<CreateFunctionRequest> PARSER =
new com.google.protobuf.AbstractParser<CreateFunctionRequest>() {
@java.lang.Override
public CreateFunctionRequest parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<CreateFunctionRequest> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<CreateFunctionRequest> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.functions.v2beta.CreateFunctionRequest getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,233 | java-compute/google-cloud-compute/src/main/java/com/google/cloud/compute/v1/stub/ResourcePoliciesStubSettings.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.compute.v1.stub;
import static com.google.cloud.compute.v1.ResourcePoliciesClient.AggregatedListPagedResponse;
import static com.google.cloud.compute.v1.ResourcePoliciesClient.ListPagedResponse;
import com.google.api.core.ApiFunction;
import com.google.api.core.ApiFuture;
import com.google.api.core.ObsoleteApi;
import com.google.api.gax.core.GaxProperties;
import com.google.api.gax.core.GoogleCredentialsProvider;
import com.google.api.gax.core.InstantiatingExecutorProvider;
import com.google.api.gax.httpjson.GaxHttpJsonProperties;
import com.google.api.gax.httpjson.HttpJsonTransportChannel;
import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider;
import com.google.api.gax.httpjson.ProtoOperationTransformers;
import com.google.api.gax.longrunning.OperationSnapshot;
import com.google.api.gax.longrunning.OperationTimedPollAlgorithm;
import com.google.api.gax.retrying.RetrySettings;
import com.google.api.gax.rpc.ApiCallContext;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.ClientContext;
import com.google.api.gax.rpc.OperationCallSettings;
import com.google.api.gax.rpc.PageContext;
import com.google.api.gax.rpc.PagedCallSettings;
import com.google.api.gax.rpc.PagedListDescriptor;
import com.google.api.gax.rpc.PagedListResponseFactory;
import com.google.api.gax.rpc.StatusCode;
import com.google.api.gax.rpc.StubSettings;
import com.google.api.gax.rpc.TransportChannelProvider;
import com.google.api.gax.rpc.UnaryCallSettings;
import com.google.api.gax.rpc.UnaryCallable;
import com.google.cloud.compute.v1.AggregatedListResourcePoliciesRequest;
import com.google.cloud.compute.v1.DeleteResourcePolicyRequest;
import com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest;
import com.google.cloud.compute.v1.GetResourcePolicyRequest;
import com.google.cloud.compute.v1.InsertResourcePolicyRequest;
import com.google.cloud.compute.v1.ListResourcePoliciesRequest;
import com.google.cloud.compute.v1.Operation;
import com.google.cloud.compute.v1.PatchResourcePolicyRequest;
import com.google.cloud.compute.v1.Policy;
import com.google.cloud.compute.v1.ResourcePoliciesScopedList;
import com.google.cloud.compute.v1.ResourcePolicy;
import com.google.cloud.compute.v1.ResourcePolicyAggregatedList;
import com.google.cloud.compute.v1.ResourcePolicyList;
import com.google.cloud.compute.v1.SetIamPolicyResourcePolicyRequest;
import com.google.cloud.compute.v1.TestIamPermissionsResourcePolicyRequest;
import com.google.cloud.compute.v1.TestPermissionsResponse;
import com.google.common.collect.ImmutableList;
import com.google.common.collect.ImmutableMap;
import com.google.common.collect.ImmutableSet;
import com.google.common.collect.Lists;
import java.io.IOException;
import java.time.Duration;
import java.util.List;
import java.util.Map;
import javax.annotation.Generated;
// AUTO-GENERATED DOCUMENTATION AND CLASS.
/**
* Settings class to configure an instance of {@link ResourcePoliciesStub}.
*
* <p>The default instance has everything set to sensible defaults:
*
* <ul>
* <li>The default service address (compute.googleapis.com) and default port (443) are used.
* <li>Credentials are acquired automatically through Application Default Credentials.
* <li>Retries are configured for idempotent methods but not for non-idempotent methods.
* </ul>
*
* <p>The builder of this class is recursive, so contained classes are themselves builders. When
* build() is called, the tree of builders is called to create the complete settings object.
*
* <p>For example, to set the
* [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings)
* of get:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* ResourcePoliciesStubSettings.Builder resourcePoliciesSettingsBuilder =
* ResourcePoliciesStubSettings.newBuilder();
* resourcePoliciesSettingsBuilder
* .getSettings()
* .setRetrySettings(
* resourcePoliciesSettingsBuilder
* .getSettings()
* .getRetrySettings()
* .toBuilder()
* .setInitialRetryDelayDuration(Duration.ofSeconds(1))
* .setInitialRpcTimeoutDuration(Duration.ofSeconds(5))
* .setMaxAttempts(5)
* .setMaxRetryDelayDuration(Duration.ofSeconds(30))
* .setMaxRpcTimeoutDuration(Duration.ofSeconds(60))
* .setRetryDelayMultiplier(1.3)
* .setRpcTimeoutMultiplier(1.5)
* .setTotalTimeoutDuration(Duration.ofSeconds(300))
* .build());
* ResourcePoliciesStubSettings resourcePoliciesSettings = resourcePoliciesSettingsBuilder.build();
* }</pre>
*
* Please refer to the [Client Side Retry
* Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for
* additional support in setting retries.
*
* <p>To configure the RetrySettings of a Long Running Operation method, create an
* OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to
* configure the RetrySettings for delete:
*
* <pre>{@code
* // This snippet has been automatically generated and should be regarded as a code template only.
* // It will require modifications to work:
* // - It may require correct/in-range values for request initialization.
* // - It may require specifying regional endpoints when creating the service client as shown in
* // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library
* ResourcePoliciesStubSettings.Builder resourcePoliciesSettingsBuilder =
* ResourcePoliciesStubSettings.newBuilder();
* TimedRetryAlgorithm timedRetryAlgorithm =
* OperationalTimedPollAlgorithm.create(
* RetrySettings.newBuilder()
* .setInitialRetryDelayDuration(Duration.ofMillis(500))
* .setRetryDelayMultiplier(1.5)
* .setMaxRetryDelayDuration(Duration.ofMillis(5000))
* .setTotalTimeoutDuration(Duration.ofHours(24))
* .build());
* resourcePoliciesSettingsBuilder
* .createClusterOperationSettings()
* .setPollingAlgorithm(timedRetryAlgorithm)
* .build();
* }</pre>
*/
@Generated("by gapic-generator-java")
public class ResourcePoliciesStubSettings extends StubSettings<ResourcePoliciesStubSettings> {
  /** The default scopes of the service. */
  private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES =
      ImmutableList.<String>builder()
          .add("https://www.googleapis.com/auth/compute")
          .add("https://www.googleapis.com/auth/cloud-platform")
          .build();

  // Immutable per-RPC call settings, built exactly once from the Builder in the constructor.
  private final PagedCallSettings<
          AggregatedListResourcePoliciesRequest,
          ResourcePolicyAggregatedList,
          AggregatedListPagedResponse>
      aggregatedListSettings;
  private final UnaryCallSettings<DeleteResourcePolicyRequest, Operation> deleteSettings;
  private final OperationCallSettings<DeleteResourcePolicyRequest, Operation, Operation>
      deleteOperationSettings;
  private final UnaryCallSettings<GetResourcePolicyRequest, ResourcePolicy> getSettings;
  private final UnaryCallSettings<GetIamPolicyResourcePolicyRequest, Policy> getIamPolicySettings;
  private final UnaryCallSettings<InsertResourcePolicyRequest, Operation> insertSettings;
  private final OperationCallSettings<InsertResourcePolicyRequest, Operation, Operation>
      insertOperationSettings;
  private final PagedCallSettings<
          ListResourcePoliciesRequest, ResourcePolicyList, ListPagedResponse>
      listSettings;
  private final UnaryCallSettings<PatchResourcePolicyRequest, Operation> patchSettings;
  private final OperationCallSettings<PatchResourcePolicyRequest, Operation, Operation>
      patchOperationSettings;
  private final UnaryCallSettings<SetIamPolicyResourcePolicyRequest, Policy> setIamPolicySettings;
  private final UnaryCallSettings<TestIamPermissionsResourcePolicyRequest, TestPermissionsResponse>
      testIamPermissionsSettings;

  // Describes how pagination state maps onto the aggregatedList request/response pair:
  // the page token and page size ("maxResults") are injected into the request, and the
  // per-scope resource entries are extracted from the response's items map.
  private static final PagedListDescriptor<
          AggregatedListResourcePoliciesRequest,
          ResourcePolicyAggregatedList,
          Map.Entry<String, ResourcePoliciesScopedList>>
      AGGREGATED_LIST_PAGE_STR_DESC =
          new PagedListDescriptor<
              AggregatedListResourcePoliciesRequest,
              ResourcePolicyAggregatedList,
              Map.Entry<String, ResourcePoliciesScopedList>>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public AggregatedListResourcePoliciesRequest injectToken(
                AggregatedListResourcePoliciesRequest payload, String token) {
              return AggregatedListResourcePoliciesRequest.newBuilder(payload)
                  .setPageToken(token)
                  .build();
            }

            @Override
            public AggregatedListResourcePoliciesRequest injectPageSize(
                AggregatedListResourcePoliciesRequest payload, int pageSize) {
              return AggregatedListResourcePoliciesRequest.newBuilder(payload)
                  .setMaxResults(pageSize)
                  .build();
            }

            @Override
            public Integer extractPageSize(AggregatedListResourcePoliciesRequest payload) {
              return payload.getMaxResults();
            }

            @Override
            public String extractNextToken(ResourcePolicyAggregatedList payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<Map.Entry<String, ResourcePoliciesScopedList>> extractResources(
                ResourcePolicyAggregatedList payload) {
              return payload.getItemsMap().entrySet();
            }
          };

  // Pagination descriptor for the plain (non-aggregated) list RPC; resources come
  // straight from the response's items list.
  private static final PagedListDescriptor<
          ListResourcePoliciesRequest, ResourcePolicyList, ResourcePolicy>
      LIST_PAGE_STR_DESC =
          new PagedListDescriptor<
              ListResourcePoliciesRequest, ResourcePolicyList, ResourcePolicy>() {
            @Override
            public String emptyToken() {
              return "";
            }

            @Override
            public ListResourcePoliciesRequest injectToken(
                ListResourcePoliciesRequest payload, String token) {
              return ListResourcePoliciesRequest.newBuilder(payload).setPageToken(token).build();
            }

            @Override
            public ListResourcePoliciesRequest injectPageSize(
                ListResourcePoliciesRequest payload, int pageSize) {
              return ListResourcePoliciesRequest.newBuilder(payload)
                  .setMaxResults(pageSize)
                  .build();
            }

            @Override
            public Integer extractPageSize(ListResourcePoliciesRequest payload) {
              return payload.getMaxResults();
            }

            @Override
            public String extractNextToken(ResourcePolicyList payload) {
              return payload.getNextPageToken();
            }

            @Override
            public Iterable<ResourcePolicy> extractResources(ResourcePolicyList payload) {
              return payload.getItemsList();
            }
          };

  // Wraps a future aggregatedList response into a paged response using the descriptor above.
  private static final PagedListResponseFactory<
          AggregatedListResourcePoliciesRequest,
          ResourcePolicyAggregatedList,
          AggregatedListPagedResponse>
      AGGREGATED_LIST_PAGE_STR_FACT =
          new PagedListResponseFactory<
              AggregatedListResourcePoliciesRequest,
              ResourcePolicyAggregatedList,
              AggregatedListPagedResponse>() {
            @Override
            public ApiFuture<AggregatedListPagedResponse> getFuturePagedResponse(
                UnaryCallable<AggregatedListResourcePoliciesRequest, ResourcePolicyAggregatedList>
                    callable,
                AggregatedListResourcePoliciesRequest request,
                ApiCallContext context,
                ApiFuture<ResourcePolicyAggregatedList> futureResponse) {
              PageContext<
                      AggregatedListResourcePoliciesRequest,
                      ResourcePolicyAggregatedList,
                      Map.Entry<String, ResourcePoliciesScopedList>>
                  pageContext =
                      PageContext.create(callable, AGGREGATED_LIST_PAGE_STR_DESC, request, context);
              return AggregatedListPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  // Wraps a future list response into a paged response using LIST_PAGE_STR_DESC.
  private static final PagedListResponseFactory<
          ListResourcePoliciesRequest, ResourcePolicyList, ListPagedResponse>
      LIST_PAGE_STR_FACT =
          new PagedListResponseFactory<
              ListResourcePoliciesRequest, ResourcePolicyList, ListPagedResponse>() {
            @Override
            public ApiFuture<ListPagedResponse> getFuturePagedResponse(
                UnaryCallable<ListResourcePoliciesRequest, ResourcePolicyList> callable,
                ListResourcePoliciesRequest request,
                ApiCallContext context,
                ApiFuture<ResourcePolicyList> futureResponse) {
              PageContext<ListResourcePoliciesRequest, ResourcePolicyList, ResourcePolicy>
                  pageContext = PageContext.create(callable, LIST_PAGE_STR_DESC, request, context);
              return ListPagedResponse.createAsync(pageContext, futureResponse);
            }
          };

  /** Returns the object with the settings used for calls to aggregatedList. */
  public PagedCallSettings<
          AggregatedListResourcePoliciesRequest,
          ResourcePolicyAggregatedList,
          AggregatedListPagedResponse>
      aggregatedListSettings() {
    return aggregatedListSettings;
  }

  /** Returns the object with the settings used for calls to delete. */
  public UnaryCallSettings<DeleteResourcePolicyRequest, Operation> deleteSettings() {
    return deleteSettings;
  }

  /** Returns the object with the settings used for calls to delete. */
  public OperationCallSettings<DeleteResourcePolicyRequest, Operation, Operation>
      deleteOperationSettings() {
    return deleteOperationSettings;
  }

  /** Returns the object with the settings used for calls to get. */
  public UnaryCallSettings<GetResourcePolicyRequest, ResourcePolicy> getSettings() {
    return getSettings;
  }

  /** Returns the object with the settings used for calls to getIamPolicy. */
  public UnaryCallSettings<GetIamPolicyResourcePolicyRequest, Policy> getIamPolicySettings() {
    return getIamPolicySettings;
  }

  /** Returns the object with the settings used for calls to insert. */
  public UnaryCallSettings<InsertResourcePolicyRequest, Operation> insertSettings() {
    return insertSettings;
  }

  /** Returns the object with the settings used for calls to insert. */
  public OperationCallSettings<InsertResourcePolicyRequest, Operation, Operation>
      insertOperationSettings() {
    return insertOperationSettings;
  }

  /** Returns the object with the settings used for calls to list. */
  public PagedCallSettings<ListResourcePoliciesRequest, ResourcePolicyList, ListPagedResponse>
      listSettings() {
    return listSettings;
  }

  /** Returns the object with the settings used for calls to patch. */
  public UnaryCallSettings<PatchResourcePolicyRequest, Operation> patchSettings() {
    return patchSettings;
  }

  /** Returns the object with the settings used for calls to patch. */
  public OperationCallSettings<PatchResourcePolicyRequest, Operation, Operation>
      patchOperationSettings() {
    return patchOperationSettings;
  }

  /** Returns the object with the settings used for calls to setIamPolicy. */
  public UnaryCallSettings<SetIamPolicyResourcePolicyRequest, Policy> setIamPolicySettings() {
    return setIamPolicySettings;
  }

  /** Returns the object with the settings used for calls to testIamPermissions. */
  public UnaryCallSettings<TestIamPermissionsResourcePolicyRequest, TestPermissionsResponse>
      testIamPermissionsSettings() {
    return testIamPermissionsSettings;
  }

  /**
   * Creates a transport-specific stub for these settings. Only the HTTP/JSON transport is
   * generated for this service; any other configured transport is rejected.
   *
   * @throws UnsupportedOperationException if the configured transport is not HTTP/JSON
   */
  public ResourcePoliciesStub createStub() throws IOException {
    if (getTransportChannelProvider()
        .getTransportName()
        .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) {
      return HttpJsonResourcePoliciesStub.create(this);
    }
    throw new UnsupportedOperationException(
        String.format(
            "Transport not supported: %s", getTransportChannelProvider().getTransportName()));
  }

  /** Returns the default service name. */
  @Override
  public String getServiceName() {
    return "compute";
  }

  /** Returns a builder for the default ExecutorProvider for this service. */
  public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() {
    return InstantiatingExecutorProvider.newBuilder();
  }

  /** Returns the default service endpoint. */
  @ObsoleteApi("Use getEndpoint() instead")
  public static String getDefaultEndpoint() {
    return "compute.googleapis.com:443";
  }

  /** Returns the default mTLS service endpoint. */
  public static String getDefaultMtlsEndpoint() {
    return "compute.mtls.googleapis.com:443";
  }

  /** Returns the default service scopes. */
  public static List<String> getDefaultServiceScopes() {
    return DEFAULT_SERVICE_SCOPES;
  }

  /** Returns a builder for the default credentials for this service. */
  public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() {
    return GoogleCredentialsProvider.newBuilder()
        .setScopesToApply(DEFAULT_SERVICE_SCOPES)
        .setUseJwtAccessWithScope(true);
  }

  /** Returns a builder for the default HTTP/JSON ChannelProvider for this service. */
  public static InstantiatingHttpJsonChannelProvider.Builder
      defaultHttpJsonTransportProviderBuilder() {
    return InstantiatingHttpJsonChannelProvider.newBuilder();
  }

  /** Returns the default TransportChannelProvider (HTTP/JSON) for this service. */
  public static TransportChannelProvider defaultTransportChannelProvider() {
    return defaultHttpJsonTransportProviderBuilder().build();
  }

  /** Returns a builder for the default API client header provider (gapic + transport tokens). */
  public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() {
    return ApiClientHeaderProvider.newBuilder()
        .setGeneratedLibToken(
            "gapic", GaxProperties.getLibraryVersion(ResourcePoliciesStubSettings.class))
        .setTransportToken(
            GaxHttpJsonProperties.getHttpJsonTokenName(),
            GaxHttpJsonProperties.getHttpJsonVersion());
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder() {
    return Builder.createDefault();
  }

  /** Returns a new builder for this class. */
  public static Builder newBuilder(ClientContext clientContext) {
    return new Builder(clientContext);
  }

  /** Returns a builder containing all the values of this settings class. */
  public Builder toBuilder() {
    return new Builder(this);
  }

  /** Builds the immutable settings snapshot from the given builder. */
  protected ResourcePoliciesStubSettings(Builder settingsBuilder) throws IOException {
    super(settingsBuilder);
    aggregatedListSettings = settingsBuilder.aggregatedListSettings().build();
    deleteSettings = settingsBuilder.deleteSettings().build();
    deleteOperationSettings = settingsBuilder.deleteOperationSettings().build();
    getSettings = settingsBuilder.getSettings().build();
    getIamPolicySettings = settingsBuilder.getIamPolicySettings().build();
    insertSettings = settingsBuilder.insertSettings().build();
    insertOperationSettings = settingsBuilder.insertOperationSettings().build();
    listSettings = settingsBuilder.listSettings().build();
    patchSettings = settingsBuilder.patchSettings().build();
    patchOperationSettings = settingsBuilder.patchOperationSettings().build();
    setIamPolicySettings = settingsBuilder.setIamPolicySettings().build();
    testIamPermissionsSettings = settingsBuilder.testIamPermissionsSettings().build();
  }

  /** Builder for ResourcePoliciesStubSettings. */
  public static class Builder extends StubSettings.Builder<ResourcePoliciesStubSettings, Builder> {
    // All unary settings builders, so applyToAllUnaryMethods can update them in one pass.
    // Note: operation (LRO) settings builders are intentionally not in this list.
    private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
    private final PagedCallSettings.Builder<
            AggregatedListResourcePoliciesRequest,
            ResourcePolicyAggregatedList,
            AggregatedListPagedResponse>
        aggregatedListSettings;
    private final UnaryCallSettings.Builder<DeleteResourcePolicyRequest, Operation> deleteSettings;
    private final OperationCallSettings.Builder<DeleteResourcePolicyRequest, Operation, Operation>
        deleteOperationSettings;
    private final UnaryCallSettings.Builder<GetResourcePolicyRequest, ResourcePolicy> getSettings;
    private final UnaryCallSettings.Builder<GetIamPolicyResourcePolicyRequest, Policy>
        getIamPolicySettings;
    private final UnaryCallSettings.Builder<InsertResourcePolicyRequest, Operation> insertSettings;
    private final OperationCallSettings.Builder<InsertResourcePolicyRequest, Operation, Operation>
        insertOperationSettings;
    private final PagedCallSettings.Builder<
            ListResourcePoliciesRequest, ResourcePolicyList, ListPagedResponse>
        listSettings;
    private final UnaryCallSettings.Builder<PatchResourcePolicyRequest, Operation> patchSettings;
    private final OperationCallSettings.Builder<PatchResourcePolicyRequest, Operation, Operation>
        patchOperationSettings;
    private final UnaryCallSettings.Builder<SetIamPolicyResourcePolicyRequest, Policy>
        setIamPolicySettings;
    private final UnaryCallSettings.Builder<
            TestIamPermissionsResourcePolicyRequest, TestPermissionsResponse>
        testIamPermissionsSettings;

    // Named sets of status codes that are considered retryable; the names ("retry_policy_0_codes",
    // "no_retry_1_codes") are referenced by initDefaults below.
    private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
        RETRYABLE_CODE_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
          ImmutableMap.builder();
      definitions.put(
          "retry_policy_0_codes",
          ImmutableSet.copyOf(
              Lists.<StatusCode.Code>newArrayList(
                  StatusCode.Code.DEADLINE_EXCEEDED, StatusCode.Code.UNAVAILABLE)));
      definitions.put(
          "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
      RETRYABLE_CODE_DEFINITIONS = definitions.build();
    }

    // Named RetrySettings presets paired with the code definitions above.
    private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

    static {
      ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
      RetrySettings settings = null;
      settings =
          RetrySettings.newBuilder()
              .setInitialRetryDelayDuration(Duration.ofMillis(100L))
              .setRetryDelayMultiplier(1.3)
              .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
              .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L))
              .setTotalTimeoutDuration(Duration.ofMillis(600000L))
              .build();
      definitions.put("retry_policy_0_params", settings);
      settings =
          RetrySettings.newBuilder()
              .setInitialRpcTimeoutDuration(Duration.ofMillis(600000L))
              .setRpcTimeoutMultiplier(1.0)
              .setMaxRpcTimeoutDuration(Duration.ofMillis(600000L))
              .setTotalTimeoutDuration(Duration.ofMillis(600000L))
              .build();
      definitions.put("no_retry_1_params", settings);
      RETRY_PARAM_DEFINITIONS = definitions.build();
    }

    protected Builder() {
      this(((ClientContext) null));
    }

    protected Builder(ClientContext clientContext) {
      super(clientContext);
      aggregatedListSettings = PagedCallSettings.newBuilder(AGGREGATED_LIST_PAGE_STR_FACT);
      deleteSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      deleteOperationSettings = OperationCallSettings.newBuilder();
      getSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      getIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      insertSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      insertOperationSettings = OperationCallSettings.newBuilder();
      listSettings = PagedCallSettings.newBuilder(LIST_PAGE_STR_FACT);
      patchSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      patchOperationSettings = OperationCallSettings.newBuilder();
      setIamPolicySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      testIamPermissionsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              aggregatedListSettings,
              deleteSettings,
              getSettings,
              getIamPolicySettings,
              insertSettings,
              listSettings,
              patchSettings,
              setIamPolicySettings,
              testIamPermissionsSettings);
      initDefaults(this);
    }

    // Copy constructor used by toBuilder(): seeds every per-RPC builder from an existing
    // settings snapshot. Deliberately does NOT call initDefaults, to preserve customizations.
    protected Builder(ResourcePoliciesStubSettings settings) {
      super(settings);
      aggregatedListSettings = settings.aggregatedListSettings.toBuilder();
      deleteSettings = settings.deleteSettings.toBuilder();
      deleteOperationSettings = settings.deleteOperationSettings.toBuilder();
      getSettings = settings.getSettings.toBuilder();
      getIamPolicySettings = settings.getIamPolicySettings.toBuilder();
      insertSettings = settings.insertSettings.toBuilder();
      insertOperationSettings = settings.insertOperationSettings.toBuilder();
      listSettings = settings.listSettings.toBuilder();
      patchSettings = settings.patchSettings.toBuilder();
      patchOperationSettings = settings.patchOperationSettings.toBuilder();
      setIamPolicySettings = settings.setIamPolicySettings.toBuilder();
      testIamPermissionsSettings = settings.testIamPermissionsSettings.toBuilder();
      unaryMethodSettingsBuilders =
          ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
              aggregatedListSettings,
              deleteSettings,
              getSettings,
              getIamPolicySettings,
              insertSettings,
              listSettings,
              patchSettings,
              setIamPolicySettings,
              testIamPermissionsSettings);
    }

    // Builds a builder pre-populated with default transport, credentials, headers, and
    // mTLS endpoint configuration, then applies the default retry policies.
    private static Builder createDefault() {
      Builder builder = new Builder(((ClientContext) null));
      builder.setTransportChannelProvider(defaultTransportChannelProvider());
      builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
      builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
      builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
      builder.setSwitchToMtlsEndpointAllowed(true);
      return initDefaults(builder);
    }

    // Applies the service-config retry/timeout defaults to every RPC, including the
    // polling algorithms for the three long-running operations (delete/insert/patch).
    private static Builder initDefaults(Builder builder) {
      builder
          .aggregatedListSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .deleteSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .getSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .getIamPolicySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .insertSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .listSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));

      builder
          .patchSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .setIamPolicySettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .testIamPermissionsSettings()
          .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
          .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));

      builder
          .deleteOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<DeleteResourcePolicyRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(20000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(600000L))
                      .build()));

      builder
          .insertOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<InsertResourcePolicyRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(20000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(600000L))
                      .build()));

      builder
          .patchOperationSettings()
          .setInitialCallSettings(
              UnaryCallSettings
                  .<PatchResourcePolicyRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                  .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
                  .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"))
                  .build())
          .setResponseTransformer(
              ProtoOperationTransformers.ResponseTransformer.create(Operation.class))
          .setMetadataTransformer(
              ProtoOperationTransformers.MetadataTransformer.create(Operation.class))
          .setPollingAlgorithm(
              OperationTimedPollAlgorithm.create(
                  RetrySettings.newBuilder()
                      .setInitialRetryDelayDuration(Duration.ofMillis(500L))
                      .setRetryDelayMultiplier(1.5)
                      .setMaxRetryDelayDuration(Duration.ofMillis(20000L))
                      .setInitialRpcTimeoutDuration(Duration.ZERO)
                      .setRpcTimeoutMultiplier(1.0)
                      .setMaxRpcTimeoutDuration(Duration.ZERO)
                      .setTotalTimeoutDuration(Duration.ofMillis(600000L))
                      .build()));

      return builder;
    }

    /**
     * Applies the given settings updater function to all of the unary API methods in this service.
     *
     * <p>Note: This method does not support applying settings to streaming methods.
     */
    public Builder applyToAllUnaryMethods(
        ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
      super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
      return this;
    }

    /** Returns the mutable list of all unary call settings builders. */
    public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
      return unaryMethodSettingsBuilders;
    }

    /** Returns the builder for the settings used for calls to aggregatedList. */
    public PagedCallSettings.Builder<
            AggregatedListResourcePoliciesRequest,
            ResourcePolicyAggregatedList,
            AggregatedListPagedResponse>
        aggregatedListSettings() {
      return aggregatedListSettings;
    }

    /** Returns the builder for the settings used for calls to delete. */
    public UnaryCallSettings.Builder<DeleteResourcePolicyRequest, Operation> deleteSettings() {
      return deleteSettings;
    }

    /** Returns the builder for the settings used for calls to delete. */
    public OperationCallSettings.Builder<DeleteResourcePolicyRequest, Operation, Operation>
        deleteOperationSettings() {
      return deleteOperationSettings;
    }

    /** Returns the builder for the settings used for calls to get. */
    public UnaryCallSettings.Builder<GetResourcePolicyRequest, ResourcePolicy> getSettings() {
      return getSettings;
    }

    /** Returns the builder for the settings used for calls to getIamPolicy. */
    public UnaryCallSettings.Builder<GetIamPolicyResourcePolicyRequest, Policy>
        getIamPolicySettings() {
      return getIamPolicySettings;
    }

    /** Returns the builder for the settings used for calls to insert. */
    public UnaryCallSettings.Builder<InsertResourcePolicyRequest, Operation> insertSettings() {
      return insertSettings;
    }

    /** Returns the builder for the settings used for calls to insert. */
    public OperationCallSettings.Builder<InsertResourcePolicyRequest, Operation, Operation>
        insertOperationSettings() {
      return insertOperationSettings;
    }

    /** Returns the builder for the settings used for calls to list. */
    public PagedCallSettings.Builder<
            ListResourcePoliciesRequest, ResourcePolicyList, ListPagedResponse>
        listSettings() {
      return listSettings;
    }

    /** Returns the builder for the settings used for calls to patch. */
    public UnaryCallSettings.Builder<PatchResourcePolicyRequest, Operation> patchSettings() {
      return patchSettings;
    }

    /** Returns the builder for the settings used for calls to patch. */
    public OperationCallSettings.Builder<PatchResourcePolicyRequest, Operation, Operation>
        patchOperationSettings() {
      return patchOperationSettings;
    }

    /** Returns the builder for the settings used for calls to setIamPolicy. */
    public UnaryCallSettings.Builder<SetIamPolicyResourcePolicyRequest, Policy>
        setIamPolicySettings() {
      return setIamPolicySettings;
    }

    /** Returns the builder for the settings used for calls to testIamPermissions. */
    public UnaryCallSettings.Builder<
            TestIamPermissionsResourcePolicyRequest, TestPermissionsResponse>
        testIamPermissionsSettings() {
      return testIamPermissionsSettings;
    }

    @Override
    public ResourcePoliciesStubSettings build() throws IOException {
      return new ResourcePoliciesStubSettings(this);
    }
  }
}
|
googleapis/google-cloud-java | 37,014 | java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/MessageAnnotation.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/dialogflow/v2beta1/participant.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.dialogflow.v2beta1;
/**
*
*
* <pre>
* Represents the result of annotation for the message.
* </pre>
*
* Protobuf type {@code google.cloud.dialogflow.v2beta1.MessageAnnotation}
*/
public final class MessageAnnotation extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.MessageAnnotation)
MessageAnnotationOrBuilder {
private static final long serialVersionUID = 0L;
// Use MessageAnnotation.newBuilder() to construct.
private MessageAnnotation(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private MessageAnnotation() {
parts_ = java.util.Collections.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new MessageAnnotation();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.ParticipantProto
.internal_static_google_cloud_dialogflow_v2beta1_MessageAnnotation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.ParticipantProto
.internal_static_google_cloud_dialogflow_v2beta1_MessageAnnotation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.MessageAnnotation.class,
com.google.cloud.dialogflow.v2beta1.MessageAnnotation.Builder.class);
}
public static final int PARTS_FIELD_NUMBER = 1;
@SuppressWarnings("serial")
private java.util.List<com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart> parts_;
/**
*
*
* <pre>
* Optional. The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
*/
@java.lang.Override
public java.util.List<com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart> getPartsList() {
return parts_;
}
/**
*
*
* <pre>
* Optional. The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
*/
@java.lang.Override
public java.util.List<? extends com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePartOrBuilder>
getPartsOrBuilderList() {
return parts_;
}
/**
*
*
* <pre>
* Optional. The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
*/
@java.lang.Override
public int getPartsCount() {
return parts_.size();
}
/**
*
*
* <pre>
* Optional. The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart getParts(int index) {
return parts_.get(index);
}
/**
*
*
* <pre>
* Optional. The collection of annotated message parts ordered by their
* position in the message. You can recover the annotated message by
* concatenating [AnnotatedMessagePart.text].
* </pre>
*
* <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
*/
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePartOrBuilder getPartsOrBuilder(
int index) {
return parts_.get(index);
}
public static final int CONTAIN_ENTITIES_FIELD_NUMBER = 2;
private boolean containEntities_ = false;
/**
*
*
* <pre>
* Required. Indicates whether the text message contains entities.
* </pre>
*
* <code>bool contain_entities = 2;</code>
*
* @return The containEntities.
*/
@java.lang.Override
public boolean getContainEntities() {
return containEntities_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
for (int i = 0; i < parts_.size(); i++) {
output.writeMessage(1, parts_.get(i));
}
if (containEntities_ != false) {
output.writeBool(2, containEntities_);
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
for (int i = 0; i < parts_.size(); i++) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, parts_.get(i));
}
if (containEntities_ != false) {
size += com.google.protobuf.CodedOutputStream.computeBoolSize(2, containEntities_);
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.MessageAnnotation)) {
return super.equals(obj);
}
com.google.cloud.dialogflow.v2beta1.MessageAnnotation other =
(com.google.cloud.dialogflow.v2beta1.MessageAnnotation) obj;
if (!getPartsList().equals(other.getPartsList())) return false;
if (getContainEntities() != other.getContainEntities()) return false;
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
if (getPartsCount() > 0) {
hash = (37 * hash) + PARTS_FIELD_NUMBER;
hash = (53 * hash) + getPartsList().hashCode();
}
hash = (37 * hash) + CONTAIN_ENTITIES_FIELD_NUMBER;
hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getContainEntities());
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
// Standard generated parse entry points. The in-memory overloads
// (ByteBuffer, ByteString, byte[]) delegate directly to PARSER; the
// stream-based overloads go through GeneratedMessageV3 helpers, which
// unwrap protobuf exceptions so plain IOExceptions propagate as such.
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Delimited variants read a varint length prefix before the message bytes.
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
// Generated builder factories.
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
// Fresh builder seeded from the default instance.
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
// Builder pre-populated with the given prototype's field values.
public static Builder newBuilder(
com.google.cloud.dialogflow.v2beta1.MessageAnnotation prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
// The default instance yields an empty builder without a merge pass.
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
 *
 *
 * <pre>
 * Represents the result of annotation for the message.
 * </pre>
 *
 * Protobuf type {@code google.cloud.dialogflow.v2beta1.MessageAnnotation}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.MessageAnnotation)
com.google.cloud.dialogflow.v2beta1.MessageAnnotationOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.dialogflow.v2beta1.ParticipantProto
.internal_static_google_cloud_dialogflow_v2beta1_MessageAnnotation_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.dialogflow.v2beta1.ParticipantProto
.internal_static_google_cloud_dialogflow_v2beta1_MessageAnnotation_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.dialogflow.v2beta1.MessageAnnotation.class,
com.google.cloud.dialogflow.v2beta1.MessageAnnotation.Builder.class);
}
// Construct using com.google.cloud.dialogflow.v2beta1.MessageAnnotation.newBuilder()
private Builder() {}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
}
// Resets all fields to their defaults and clears the has-bits.
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
if (partsBuilder_ == null) {
parts_ = java.util.Collections.emptyList();
} else {
parts_ = null;
partsBuilder_.clear();
}
bitField0_ = (bitField0_ & ~0x00000001);
containEntities_ = false;
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.dialogflow.v2beta1.ParticipantProto
.internal_static_google_cloud_dialogflow_v2beta1_MessageAnnotation_descriptor;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.MessageAnnotation getDefaultInstanceForType() {
return com.google.cloud.dialogflow.v2beta1.MessageAnnotation.getDefaultInstance();
}
// Builds the message, throwing if it is not initialized.
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.MessageAnnotation build() {
com.google.cloud.dialogflow.v2beta1.MessageAnnotation result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
// Builds the message without the initialization check; repeated fields are
// frozen first, then scalar fields are copied only if any has-bit is set.
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.MessageAnnotation buildPartial() {
com.google.cloud.dialogflow.v2beta1.MessageAnnotation result =
new com.google.cloud.dialogflow.v2beta1.MessageAnnotation(this);
buildPartialRepeatedFields(result);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
// Transfers the `parts` list into the result, making the list-backed form
// unmodifiable so the built message is immutable.
private void buildPartialRepeatedFields(
com.google.cloud.dialogflow.v2beta1.MessageAnnotation result) {
if (partsBuilder_ == null) {
if (((bitField0_ & 0x00000001) != 0)) {
parts_ = java.util.Collections.unmodifiableList(parts_);
bitField0_ = (bitField0_ & ~0x00000001);
}
result.parts_ = parts_;
} else {
result.parts_ = partsBuilder_.build();
}
}
// Copies scalar fields guarded by their has-bits (bit 0x2 = contain_entities).
private void buildPartial0(com.google.cloud.dialogflow.v2beta1.MessageAnnotation result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.containEntities_ = containEntities_;
}
}
// The following overrides simply delegate to GeneratedMessageV3.Builder.
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
// Dispatches typed merge when the other message is a MessageAnnotation.
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.dialogflow.v2beta1.MessageAnnotation) {
return mergeFrom((com.google.cloud.dialogflow.v2beta1.MessageAnnotation) other);
} else {
super.mergeFrom(other);
return this;
}
}
// Field-by-field merge: repeated `parts` entries are appended; a set
// (non-default) `contain_entities` overwrites the current value.
public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.MessageAnnotation other) {
if (other == com.google.cloud.dialogflow.v2beta1.MessageAnnotation.getDefaultInstance())
return this;
if (partsBuilder_ == null) {
if (!other.parts_.isEmpty()) {
if (parts_.isEmpty()) {
parts_ = other.parts_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensurePartsIsMutable();
parts_.addAll(other.parts_);
}
onChanged();
}
} else {
if (!other.parts_.isEmpty()) {
if (partsBuilder_.isEmpty()) {
// Adopt the other message's list directly; recreate the field
// builder lazily (or eagerly when alwaysUseFieldBuilders is set).
partsBuilder_.dispose();
partsBuilder_ = null;
parts_ = other.parts_;
bitField0_ = (bitField0_ & ~0x00000001);
partsBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getPartsFieldBuilder()
: null;
} else {
partsBuilder_.addAllMessages(other.parts_);
}
}
}
if (other.getContainEntities() != false) {
setContainEntities(other.getContainEntities());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
// Streaming parse loop: reads tags until EOF (tag 0) or an end-group tag,
// routing tag 10 (field 1, `parts`) and tag 16 (field 2, `contain_entities`)
// to their fields and everything else to the unknown-field set.
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart m =
input.readMessage(
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.parser(),
extensionRegistry);
if (partsBuilder_ == null) {
ensurePartsIsMutable();
parts_.add(m);
} else {
partsBuilder_.addMessage(m);
}
break;
} // case 10
case 16:
{
containEntities_ = input.readBool();
bitField0_ |= 0x00000002;
break;
} // case 16
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
// Has-bits: 0x1 = parts_ list is builder-owned/mutable, 0x2 = contain_entities set.
private int bitField0_;
private java.util.List<com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart> parts_ =
java.util.Collections.emptyList();
// Copy-on-write: replaces the shared/immutable list with a private ArrayList
// before the first mutation.
private void ensurePartsIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
parts_ =
new java.util.ArrayList<com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart>(
parts_);
bitField0_ |= 0x00000001;
}
}
// Lazily-created nested-builder view of `parts`; while non-null it owns the
// field and parts_ is set to null.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart,
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.Builder,
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePartOrBuilder>
partsBuilder_;
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public java.util.List<com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart> getPartsList() {
if (partsBuilder_ == null) {
return java.util.Collections.unmodifiableList(parts_);
} else {
return partsBuilder_.getMessageList();
}
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public int getPartsCount() {
if (partsBuilder_ == null) {
return parts_.size();
} else {
return partsBuilder_.getCount();
}
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart getParts(int index) {
if (partsBuilder_ == null) {
return parts_.get(index);
} else {
return partsBuilder_.getMessage(index);
}
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public Builder setParts(
int index, com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart value) {
if (partsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePartsIsMutable();
parts_.set(index, value);
onChanged();
} else {
partsBuilder_.setMessage(index, value);
}
return this;
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public Builder setParts(
int index,
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.Builder builderForValue) {
if (partsBuilder_ == null) {
ensurePartsIsMutable();
parts_.set(index, builderForValue.build());
onChanged();
} else {
partsBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public Builder addParts(com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart value) {
if (partsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePartsIsMutable();
parts_.add(value);
onChanged();
} else {
partsBuilder_.addMessage(value);
}
return this;
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public Builder addParts(
int index, com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart value) {
if (partsBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensurePartsIsMutable();
parts_.add(index, value);
onChanged();
} else {
partsBuilder_.addMessage(index, value);
}
return this;
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public Builder addParts(
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.Builder builderForValue) {
if (partsBuilder_ == null) {
ensurePartsIsMutable();
parts_.add(builderForValue.build());
onChanged();
} else {
partsBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public Builder addParts(
int index,
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.Builder builderForValue) {
if (partsBuilder_ == null) {
ensurePartsIsMutable();
parts_.add(index, builderForValue.build());
onChanged();
} else {
partsBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public Builder addAllParts(
java.lang.Iterable<? extends com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart>
values) {
if (partsBuilder_ == null) {
ensurePartsIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, parts_);
onChanged();
} else {
partsBuilder_.addAllMessages(values);
}
return this;
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public Builder clearParts() {
if (partsBuilder_ == null) {
parts_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
partsBuilder_.clear();
}
return this;
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public Builder removeParts(int index) {
if (partsBuilder_ == null) {
ensurePartsIsMutable();
parts_.remove(index);
onChanged();
} else {
partsBuilder_.remove(index);
}
return this;
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.Builder getPartsBuilder(
int index) {
return getPartsFieldBuilder().getBuilder(index);
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePartOrBuilder getPartsOrBuilder(
int index) {
if (partsBuilder_ == null) {
return parts_.get(index);
} else {
return partsBuilder_.getMessageOrBuilder(index);
}
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public java.util.List<
? extends com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePartOrBuilder>
getPartsOrBuilderList() {
if (partsBuilder_ != null) {
return partsBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(parts_);
}
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.Builder addPartsBuilder() {
return getPartsFieldBuilder()
.addBuilder(
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.Builder addPartsBuilder(
int index) {
return getPartsFieldBuilder()
.addBuilder(
index, com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.getDefaultInstance());
}
/**
 *
 *
 * <pre>
 * Optional. The collection of annotated message parts ordered by their
 * position in the message. You can recover the annotated message by
 * concatenating [AnnotatedMessagePart.text].
 * </pre>
 *
 * <code>repeated .google.cloud.dialogflow.v2beta1.AnnotatedMessagePart parts = 1;</code>
 */
public java.util.List<com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.Builder>
getPartsBuilderList() {
return getPartsFieldBuilder().getBuilderList();
}
// Creates the field builder on first use, handing it the current list and
// mutability flag; afterwards the builder is the single source of truth.
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart,
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.Builder,
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePartOrBuilder>
getPartsFieldBuilder() {
if (partsBuilder_ == null) {
partsBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart,
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePart.Builder,
com.google.cloud.dialogflow.v2beta1.AnnotatedMessagePartOrBuilder>(
parts_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
parts_ = null;
}
return partsBuilder_;
}
private boolean containEntities_;
/**
 *
 *
 * <pre>
 * Required. Indicates whether the text message contains entities.
 * </pre>
 *
 * <code>bool contain_entities = 2;</code>
 *
 * @return The containEntities.
 */
@java.lang.Override
public boolean getContainEntities() {
return containEntities_;
}
/**
 *
 *
 * <pre>
 * Required. Indicates whether the text message contains entities.
 * </pre>
 *
 * <code>bool contain_entities = 2;</code>
 *
 * @param value The containEntities to set.
 * @return This builder for chaining.
 */
public Builder setContainEntities(boolean value) {
containEntities_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
 *
 *
 * <pre>
 * Required. Indicates whether the text message contains entities.
 * </pre>
 *
 * <code>bool contain_entities = 2;</code>
 *
 * @return This builder for chaining.
 */
public Builder clearContainEntities() {
bitField0_ = (bitField0_ & ~0x00000002);
containEntities_ = false;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.MessageAnnotation)
}
// @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.MessageAnnotation)
// Singleton default instance: all fields at their default values; shared by
// every getDefaultInstance()/getDefaultInstanceForType() call.
private static final com.google.cloud.dialogflow.v2beta1.MessageAnnotation DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.MessageAnnotation();
}
public static com.google.cloud.dialogflow.v2beta1.MessageAnnotation getDefaultInstance() {
return DEFAULT_INSTANCE;
}
// Shared parser instance: delegates to the builder's streaming mergeFrom and
// attaches the partially-built message to any parse exception.
private static final com.google.protobuf.Parser<MessageAnnotation> PARSER =
new com.google.protobuf.AbstractParser<MessageAnnotation>() {
@java.lang.Override
public MessageAnnotation parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
// Plain I/O failures are wrapped so the parser contract (protobuf
// exceptions only) holds while the partial message is preserved.
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<MessageAnnotation> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<MessageAnnotation> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.dialogflow.v2beta1.MessageAnnotation getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
apache/nifi | 36,869 | nifi-extension-bundles/nifi-mqtt-bundle/nifi-mqtt-processors/src/test/java/org/apache/nifi/processors/mqtt/TestConsumeMQTT.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.nifi.processors.mqtt;
import org.apache.nifi.processor.ProcessContext;
import org.apache.nifi.processor.ProcessSession;
import org.apache.nifi.processors.mqtt.common.AbstractMQTTProcessor;
import org.apache.nifi.processors.mqtt.common.MqttClient;
import org.apache.nifi.processors.mqtt.common.MqttTestClient;
import org.apache.nifi.processors.mqtt.common.ReceivedMqttMessage;
import org.apache.nifi.processors.mqtt.common.StandardMqttMessage;
import org.apache.nifi.provenance.ProvenanceEventRecord;
import org.apache.nifi.provenance.ProvenanceEventType;
import org.apache.nifi.reporting.InitializationException;
import org.apache.nifi.ssl.SSLContextService;
import org.apache.nifi.util.MockFlowFile;
import org.apache.nifi.util.TestRunner;
import org.apache.nifi.util.TestRunners;
import org.junit.jupiter.api.AfterEach;
import org.junit.jupiter.api.Test;
import java.lang.reflect.Field;
import java.lang.reflect.InvocationTargetException;
import java.lang.reflect.Method;
import java.lang.reflect.Proxy;
import java.nio.ByteBuffer;
import java.nio.charset.StandardCharsets;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.concurrent.BlockingQueue;
import static org.apache.nifi.processors.mqtt.ConsumeMQTT.BROKER_ATTRIBUTE_KEY;
import static org.apache.nifi.processors.mqtt.ConsumeMQTT.IS_DUPLICATE_ATTRIBUTE_KEY;
import static org.apache.nifi.processors.mqtt.ConsumeMQTT.IS_RETAINED_ATTRIBUTE_KEY;
import static org.apache.nifi.processors.mqtt.ConsumeMQTT.QOS_ATTRIBUTE_KEY;
import static org.apache.nifi.processors.mqtt.ConsumeMQTT.TOPIC_ATTRIBUTE_KEY;
import static org.apache.nifi.processors.mqtt.common.MqttConstants.ALLOWABLE_VALUE_CLEAN_SESSION_FALSE;
import static org.apache.nifi.processors.mqtt.common.MqttTestUtil.createJsonRecordSetReaderService;
import static org.apache.nifi.processors.mqtt.common.MqttTestUtil.createJsonRecordSetWriterService;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertFalse;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertNull;
import static org.junit.jupiter.api.Assertions.assertThrows;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.mockito.Mockito.mock;
import static org.mockito.Mockito.when;
public class TestConsumeMQTT {
// Wait applied around publish/consume steps; 0 keeps the tests fast while
// leaving an obvious knob if timing slack is ever needed.
private static final int PUBLISH_WAIT_MS = 0;
private static final String THIS_IS_NOT_JSON = "ThisIsNotAJSON";
// Broker URIs covering the supported schemes (tcp/ssl/ws/wss), clustered
// (comma-separated) forms, and invalid variants used for validation tests.
private static final String BROKER_URI = "tcp://localhost:1883";
private static final String SSL_BROKER_URI = "ssl://localhost:8883";
private static final String WS_BROKER_URI = "ws://localhost:15675/ws";
private static final String WSS_BROKER_URI = "wss://localhost:15676/ws";
private static final String CLUSTERED_BROKER_URI = "tcp://localhost:1883,tcp://localhost:1884";
private static final String SSL_CLUSTERED_BROKER_URI = "ssl://localhost:1883,ssl://localhost:1884";
// Invalid: unsupported scheme / mixed schemes within one cluster list.
private static final String INVALID_BROKER_URI = "http://localhost:1883";
private static final String INVALID_CLUSTERED_BROKER_URI = "ssl://localhost:1883,tcp://localhost:1884";
private static final String CLIENT_ID = "TestClient";
private static final String TOPIC_NAME = "test/topic";
private static final String INTERNAL_QUEUE_SIZE = "100";
private static final String STRING_MESSAGE = "testMessage";
private static final String JSON_PAYLOAD = "{\"name\":\"Apache NiFi\"}";
// MQTT quality-of-service levels.
private static final int AT_MOST_ONCE = 0;
private static final int AT_LEAST_ONCE = 1;
private static final int EXACTLY_ONCE = 2;
// Per-test state; reset in cleanup() so every test starts fresh.
private MqttTestClient mqttTestClient;
private TestRunner testRunner;
// Drop per-test state so each test constructs its own runner and client.
@AfterEach
public void cleanup() {
testRunner = null;
mqttTestClient = null;
}
// Verifies the client-ID / group-ID validation rules: setting a group ID
// requires a non-static client ID (expression), and removing the client ID
// restores validity.
@Test
public void testClientIDConfiguration() {
testRunner = initializeTestRunner();
testRunner.assertValid();
// Group ID with the default static client ID is invalid.
testRunner.setProperty(ConsumeMQTT.PROP_GROUPID, "group");
testRunner.assertNotValid();
// An expression-based client ID makes the combination valid again.
testRunner.setProperty(ConsumeMQTT.PROP_CLIENTID, "${hostname()}");
testRunner.assertValid();
testRunner.removeProperty(ConsumeMQTT.PROP_CLIENTID);
testRunner.assertValid();
}
// A last-will message without a last-will topic must fail validation;
// supplying the topic makes the configuration valid.
@Test
public void testLastWillConfig() {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.setProperty(ConsumeMQTT.PROP_LAST_WILL_MESSAGE, "lastWill message");
testRunner.assertNotValid();
testRunner.setProperty(ConsumeMQTT.PROP_LAST_WILL_TOPIC, "lastWill topic");
testRunner.assertValid();
}
// Broker URI validation: unsupported schemes and mixed-scheme cluster lists
// are rejected; tcp, clustered tcp, and ws URIs are accepted.
@Test
public void testBrokerUriConfig() {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
// http scheme is not a valid MQTT transport.
testRunner.setProperty(ConsumeMQTT.PROP_BROKER_URI, INVALID_BROKER_URI);
testRunner.assertNotValid();
// Cluster list mixing ssl and tcp schemes is rejected.
testRunner.setProperty(ConsumeMQTT.PROP_BROKER_URI, INVALID_CLUSTERED_BROKER_URI);
testRunner.assertNotValid();
testRunner.setProperty(ConsumeMQTT.PROP_BROKER_URI, BROKER_URI);
testRunner.assertValid();
testRunner.setProperty(ConsumeMQTT.PROP_BROKER_URI, CLUSTERED_BROKER_URI);
testRunner.assertValid();
testRunner.setProperty(ConsumeMQTT.PROP_BROKER_URI, WS_BROKER_URI);
testRunner.assertValid();
}
// ssl:// and wss:// broker URIs are only valid once an SSLContextService is
// configured on the processor.
@Test
public void testSSLBrokerUriRequiresSSLContextServiceConfig() throws InitializationException {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
// Without an SSL context service, ssl URIs (single or clustered) fail validation.
testRunner.setProperty(ConsumeMQTT.PROP_BROKER_URI, SSL_BROKER_URI);
testRunner.assertNotValid();
testRunner.setProperty(ConsumeMQTT.PROP_BROKER_URI, SSL_CLUSTERED_BROKER_URI);
testRunner.assertNotValid();
// Registering the service (may throw InitializationException) fixes it.
final String identifier = addSSLContextService(testRunner);
testRunner.setProperty(ConsumeMQTT.PROP_SSL_CONTEXT_SERVICE, identifier);
testRunner.assertValid();
testRunner.setProperty(ConsumeMQTT.PROP_BROKER_URI, SSL_BROKER_URI);
testRunner.assertValid();
testRunner.setProperty(ConsumeMQTT.PROP_BROKER_URI, WSS_BROKER_URI);
testRunner.assertValid();
}
// Record reader/writer and message demarcator are mutually exclusive output
// strategies; configuring both must fail validation.
@Test
public void testRecordAndDemarcatorConfigurationTogetherIsInvalid() throws InitializationException {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.setProperty(ConsumeMQTT.RECORD_READER, createJsonRecordSetReaderService(testRunner));
testRunner.setProperty(ConsumeMQTT.RECORD_WRITER, createJsonRecordSetWriterService(testRunner));
testRunner.setProperty(ConsumeMQTT.MESSAGE_DEMARCATOR, "\n");
testRunner.assertNotValid();
}
// End-to-end receipt of a QoS 2 ("exactly once") message: the processor is
// scheduled and connected, one message is published, and the resulting
// FlowFile's content, provenance, and MQTT attributes are verified.
// Uses the shared STRING_MESSAGE / EXACTLY_ONCE constants instead of
// repeating the literals, so the assertions stay in sync with the publish.
@Test
public void testQoS2() throws Exception {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.setProperty(ConsumeMQTT.PROP_QOS, String.valueOf(EXACTLY_ONCE));
testRunner.assertValid();
final ConsumeMQTT consumeMQTT = (ConsumeMQTT) testRunner.getProcessor();
consumeMQTT.onScheduled(testRunner.getProcessContext());
reconnect(consumeMQTT, testRunner.getProcessContext());
Thread.sleep(PUBLISH_WAIT_MS);
assertTrue(isConnected(consumeMQTT));
publishMessage(STRING_MESSAGE, EXACTLY_ONCE);
Thread.sleep(PUBLISH_WAIT_MS);
testRunner.run(1, false, false);
// Exactly one FlowFile routed to the message relationship, with provenance.
testRunner.assertTransferCount(ConsumeMQTT.REL_MESSAGE, 1);
assertProvenanceEvents(1);
final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_MESSAGE);
final MockFlowFile flowFile = flowFiles.getFirst();
flowFile.assertContentEquals(STRING_MESSAGE);
flowFile.assertAttributeEquals(BROKER_ATTRIBUTE_KEY, BROKER_URI);
flowFile.assertAttributeEquals(TOPIC_ATTRIBUTE_KEY, TOPIC_NAME);
flowFile.assertAttributeEquals(QOS_ATTRIBUTE_KEY, String.valueOf(EXACTLY_ONCE));
flowFile.assertAttributeEquals(IS_DUPLICATE_ATTRIBUTE_KEY, "false");
flowFile.assertAttributeEquals(IS_RETAINED_ATTRIBUTE_KEY, "false");
}
@Test
// With QoS 2 and a persistent (non-clean) session, a message published while
// the consumer is unscheduled must still be delivered after it reconnects.
public void testQoS2NotCleanSession() throws Exception {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.setProperty(ConsumeMQTT.PROP_QOS, "2");
testRunner.setProperty(ConsumeMQTT.PROP_CLEAN_SESSION, ALLOWABLE_VALUE_CLEAN_SESSION_FALSE);
testRunner.assertValid();
final ConsumeMQTT consumeMQTT = (ConsumeMQTT) testRunner.getProcessor();
consumeMQTT.onScheduled(testRunner.getProcessContext());
reconnect(consumeMQTT, testRunner.getProcessContext());
Thread.sleep(PUBLISH_WAIT_MS);
assertTrue(isConnected(consumeMQTT));
// Publish while the consumer is disconnected, then bring it back up.
consumeMQTT.onUnscheduled(testRunner.getProcessContext());
publishMessage(STRING_MESSAGE, EXACTLY_ONCE);
consumeMQTT.onScheduled(testRunner.getProcessContext());
reconnect(consumeMQTT, testRunner.getProcessContext());
Thread.sleep(PUBLISH_WAIT_MS);
assertTrue(isConnected(consumeMQTT));
testRunner.run(1, false, false);
// The persisted session must replay the missed message exactly once.
testRunner.assertTransferCount(ConsumeMQTT.REL_MESSAGE, 1);
assertProvenanceEvents(1);
final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_MESSAGE);
final MockFlowFile flowFile = flowFiles.getFirst();
flowFile.assertContentEquals("testMessage");
flowFile.assertAttributeEquals(BROKER_ATTRIBUTE_KEY, BROKER_URI);
flowFile.assertAttributeEquals(TOPIC_ATTRIBUTE_KEY, TOPIC_NAME);
flowFile.assertAttributeEquals(QOS_ATTRIBUTE_KEY, "2");
flowFile.assertAttributeEquals(IS_DUPLICATE_ATTRIBUTE_KEY, "false");
flowFile.assertAttributeEquals(IS_RETAINED_ATTRIBUTE_KEY, "false");
}
@Test
// Subscribe at QoS 1 (at least once). Duplicates are permitted by the QoS
// contract, so only a non-empty result is asserted rather than an exact count.
public void testQoS1() throws Exception {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.setProperty(ConsumeMQTT.PROP_QOS, "1");
testRunner.assertValid();
final ConsumeMQTT consumeMQTT = (ConsumeMQTT) testRunner.getProcessor();
consumeMQTT.onScheduled(testRunner.getProcessContext());
reconnect(consumeMQTT, testRunner.getProcessContext());
Thread.sleep(PUBLISH_WAIT_MS);
assertTrue(isConnected(consumeMQTT));
publishMessage(STRING_MESSAGE, AT_LEAST_ONCE);
Thread.sleep(PUBLISH_WAIT_MS);
testRunner.run(1, false, false);
final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_MESSAGE);
assertFalse(flowFiles.isEmpty());
// One RECEIVE provenance event per delivered flow file.
assertProvenanceEvents(flowFiles.size());
final MockFlowFile flowFile = flowFiles.getFirst();
flowFile.assertContentEquals("testMessage");
flowFile.assertAttributeEquals(BROKER_ATTRIBUTE_KEY, BROKER_URI);
flowFile.assertAttributeEquals(TOPIC_ATTRIBUTE_KEY, TOPIC_NAME);
flowFile.assertAttributeEquals(QOS_ATTRIBUTE_KEY, "1");
flowFile.assertAttributeEquals(IS_DUPLICATE_ATTRIBUTE_KEY, "false");
flowFile.assertAttributeEquals(IS_RETAINED_ATTRIBUTE_KEY, "false");
}
@Test
// With QoS 1 and a persistent (non-clean) session, a message published while
// the consumer is unscheduled must be delivered after it reconnects.
public void testQoS1NotCleanSession() throws Exception {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.setProperty(ConsumeMQTT.PROP_QOS, "1");
testRunner.setProperty(ConsumeMQTT.PROP_CLEAN_SESSION, ALLOWABLE_VALUE_CLEAN_SESSION_FALSE);
testRunner.assertValid();
final ConsumeMQTT consumeMQTT = (ConsumeMQTT) testRunner.getProcessor();
consumeMQTT.onScheduled(testRunner.getProcessContext());
reconnect(consumeMQTT, testRunner.getProcessContext());
Thread.sleep(PUBLISH_WAIT_MS);
assertTrue(isConnected(consumeMQTT));
// Publish while the consumer is disconnected, then bring it back up.
consumeMQTT.onUnscheduled(testRunner.getProcessContext());
publishMessage(STRING_MESSAGE, AT_LEAST_ONCE);
consumeMQTT.onScheduled(testRunner.getProcessContext());
reconnect(consumeMQTT, testRunner.getProcessContext());
Thread.sleep(PUBLISH_WAIT_MS);
assertTrue(isConnected(consumeMQTT));
testRunner.run(1, false, false);
testRunner.assertTransferCount(ConsumeMQTT.REL_MESSAGE, 1);
final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_MESSAGE);
assertFalse(flowFiles.isEmpty());
assertProvenanceEvents(flowFiles.size());
final MockFlowFile flowFile = flowFiles.getFirst();
flowFile.assertContentEquals("testMessage");
flowFile.assertAttributeEquals(BROKER_ATTRIBUTE_KEY, BROKER_URI);
flowFile.assertAttributeEquals(TOPIC_ATTRIBUTE_KEY, TOPIC_NAME);
flowFile.assertAttributeEquals(QOS_ATTRIBUTE_KEY, "1");
flowFile.assertAttributeEquals(IS_DUPLICATE_ATTRIBUTE_KEY, "false");
flowFile.assertAttributeEquals(IS_RETAINED_ATTRIBUTE_KEY, "false");
}
@Test
// Subscribe at QoS 0 (at most once). Delivery is not guaranteed, so the test
// accepts zero or one flow file and only checks attributes when one arrived.
public void testQoS0() throws Exception {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.setProperty(ConsumeMQTT.PROP_QOS, "0");
testRunner.assertValid();
final ConsumeMQTT consumeMQTT = (ConsumeMQTT) testRunner.getProcessor();
consumeMQTT.onScheduled(testRunner.getProcessContext());
reconnect(consumeMQTT, testRunner.getProcessContext());
Thread.sleep(PUBLISH_WAIT_MS);
assertTrue(isConnected(consumeMQTT));
publishMessage(STRING_MESSAGE, AT_MOST_ONCE);
Thread.sleep(PUBLISH_WAIT_MS);
testRunner.run(1, false, false);
final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_MESSAGE);
// "At most once" means 0 or 1 deliveries; never more.
assertTrue(flowFiles.size() < 2);
assertProvenanceEvents(flowFiles.size());
if (flowFiles.size() == 1) {
MockFlowFile flowFile = flowFiles.getFirst();
flowFile.assertContentEquals("testMessage");
flowFile.assertAttributeEquals(BROKER_ATTRIBUTE_KEY, BROKER_URI);
flowFile.assertAttributeEquals(TOPIC_ATTRIBUTE_KEY, TOPIC_NAME);
flowFile.assertAttributeEquals(QOS_ATTRIBUTE_KEY, "0");
flowFile.assertAttributeEquals(IS_DUPLICATE_ATTRIBUTE_KEY, "false");
flowFile.assertAttributeEquals(IS_RETAINED_ATTRIBUTE_KEY, "false");
}
}
@Test
// Verify that a message still sitting in the internal queue when the
// processor is stopped is drained and transferred during onStopped, so no
// received message is lost on shutdown.
public void testOnStoppedFinish() throws Exception {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.setProperty(ConsumeMQTT.PROP_QOS, "2");
testRunner.assertValid();
final byte[] content = ByteBuffer.wrap("testMessage".getBytes()).array();
final ReceivedMqttMessage testMessage = new ReceivedMqttMessage(content, 2, false, TOPIC_NAME);
final ConsumeMQTT consumeMQTT = (ConsumeMQTT) testRunner.getProcessor();
consumeMQTT.onScheduled(testRunner.getProcessContext());
reconnect(consumeMQTT, testRunner.getProcessContext());
Thread.sleep(PUBLISH_WAIT_MS);
assertTrue(isConnected(consumeMQTT));
// onStopped needs a session factory to create the draining session.
consumeMQTT.processSessionFactory = testRunner.getProcessSessionFactory();
// Inject a message directly into the private internal queue via reflection,
// bypassing the broker, so the queue is non-empty at stop time.
final Field f = ConsumeMQTT.class.getDeclaredField("mqttQueue");
f.setAccessible(true);
@SuppressWarnings("unchecked")
final BlockingQueue<ReceivedMqttMessage> queue = (BlockingQueue<ReceivedMqttMessage>) f.get(consumeMQTT);
queue.add(testMessage);
consumeMQTT.onUnscheduled(testRunner.getProcessContext());
consumeMQTT.onStopped(testRunner.getProcessContext());
// The queued message must have been flushed to REL_MESSAGE on stop.
testRunner.assertTransferCount(ConsumeMQTT.REL_MESSAGE, 1);
assertProvenanceEvents(1);
final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_MESSAGE);
final MockFlowFile flowFile = flowFiles.getFirst();
flowFile.assertContentEquals("testMessage");
flowFile.assertAttributeEquals(BROKER_ATTRIBUTE_KEY, BROKER_URI);
flowFile.assertAttributeEquals(TOPIC_ATTRIBUTE_KEY, TOPIC_NAME);
flowFile.assertAttributeEquals(QOS_ATTRIBUTE_KEY, "2");
flowFile.assertAttributeEquals(IS_DUPLICATE_ATTRIBUTE_KEY, "false");
flowFile.assertAttributeEquals(IS_RETAINED_ATTRIBUTE_KEY, "false");
}
@Test
// Verify queue-size reconfiguration: shrinking the max queue size below the
// number of already-queued messages is invalid, growing it is valid, and all
// queued messages are still delivered after the resize.
public void testResizeBuffer() throws Exception {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.setProperty(ConsumeMQTT.PROP_QOS, "2");
testRunner.setProperty(ConsumeMQTT.PROP_MAX_QUEUE_SIZE, "2");
testRunner.assertValid();
final ConsumeMQTT consumeMQTT = (ConsumeMQTT) testRunner.getProcessor();
consumeMQTT.onScheduled(testRunner.getProcessContext());
reconnect(consumeMQTT, testRunner.getProcessContext());
Thread.sleep(PUBLISH_WAIT_MS);
assertTrue(isConnected(consumeMQTT));
// Fill the queue to its capacity of 2.
publishMessage(STRING_MESSAGE, EXACTLY_ONCE);
publishMessage(STRING_MESSAGE, EXACTLY_ONCE);
Thread.sleep(PUBLISH_WAIT_MS);
consumeMQTT.onUnscheduled(testRunner.getProcessContext());
// 1 < 2 queued messages, so shrinking is rejected by validation.
testRunner.setProperty(ConsumeMQTT.PROP_MAX_QUEUE_SIZE, "1");
testRunner.assertNotValid();
testRunner.setProperty(ConsumeMQTT.PROP_MAX_QUEUE_SIZE, "3");
testRunner.assertValid();
testRunner.run(1);
testRunner.assertTransferCount(ConsumeMQTT.REL_MESSAGE, 2);
assertProvenanceEvents(2);
final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_MESSAGE);
final MockFlowFile flowFile = flowFiles.getFirst();
flowFile.assertContentEquals("testMessage");
flowFile.assertAttributeEquals(BROKER_ATTRIBUTE_KEY, BROKER_URI);
flowFile.assertAttributeEquals(TOPIC_ATTRIBUTE_KEY, TOPIC_NAME);
flowFile.assertAttributeEquals(QOS_ATTRIBUTE_KEY, "2");
flowFile.assertAttributeEquals(IS_DUPLICATE_ATTRIBUTE_KEY, "false");
flowFile.assertAttributeEquals(IS_RETAINED_ATTRIBUTE_KEY, "false");
}
@Test
// Record-oriented consumption: parseable JSON messages are merged into one
// record-set flow file with MQTT metadata fields (_topic, _qos, ...) added to
// each record; unparseable payloads are routed raw to REL_PARSE_FAILURE.
public void testConsumeRecordsWithAddedFields() throws Exception {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.setProperty(ConsumeMQTT.RECORD_READER, createJsonRecordSetReaderService(testRunner));
testRunner.setProperty(ConsumeMQTT.RECORD_WRITER, createJsonRecordSetWriterService(testRunner));
testRunner.assertValid();
final ConsumeMQTT consumeMQTT = (ConsumeMQTT) testRunner.getProcessor();
consumeMQTT.onScheduled(testRunner.getProcessContext());
reconnect(consumeMQTT, testRunner.getProcessContext());
Thread.sleep(PUBLISH_WAIT_MS);
assertTrue(isConnected(consumeMQTT));
// Two valid JSON payloads with one invalid payload in between.
publishMessage(JSON_PAYLOAD, AT_MOST_ONCE);
publishMessage(THIS_IS_NOT_JSON, AT_MOST_ONCE);
publishMessage(JSON_PAYLOAD, AT_MOST_ONCE);
Thread.sleep(PUBLISH_WAIT_MS);
testRunner.run(1, false, false);
final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_MESSAGE);
assertEquals(1, flowFiles.size());
assertEquals("[{\"name\":\"Apache NiFi\",\"_topic\":\"test/topic\",\"_topicSegments\":[\"test\",\"topic\"],\"_qos\":0,\"_isDuplicate\":false,\"_isRetained\":false},"
+ "{\"name\":\"Apache NiFi\",\"_topic\":\"test/topic\",\"_topicSegments\":[\"test\",\"topic\"],\"_qos\":0,\"_isDuplicate\":false,\"_isRetained\":false}]",
new String(flowFiles.getFirst().toByteArray()));
// The non-JSON payload is emitted unmodified on the parse-failure relationship.
final List<MockFlowFile> badFlowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_PARSE_FAILURE);
assertEquals(1, badFlowFiles.size());
assertEquals(THIS_IS_NOT_JSON, new String(badFlowFiles.getFirst().toByteArray()));
}
@Test
// Demarcator-based consumption: all received payloads (valid JSON or not) are
// concatenated into a single flow file joined by the demarcator, and nothing
// is routed to the parse-failure relationship.
public void testConsumeDemarcator() throws Exception {
    mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
    testRunner = initializeTestRunner(mqttTestClient);
    // "\\n" is the literal two-character sequence backslash + 'n', not a newline.
    testRunner.setProperty(ConsumeMQTT.MESSAGE_DEMARCATOR, "\\n");
    testRunner.assertValid();
    final ConsumeMQTT consumeMQTT = (ConsumeMQTT) testRunner.getProcessor();
    consumeMQTT.onScheduled(testRunner.getProcessContext());
    reconnect(consumeMQTT, testRunner.getProcessContext());
    Thread.sleep(PUBLISH_WAIT_MS);
    assertTrue(isConnected(consumeMQTT));
    publishMessage(JSON_PAYLOAD, AT_MOST_ONCE);
    publishMessage(THIS_IS_NOT_JSON, AT_MOST_ONCE);
    publishMessage(JSON_PAYLOAD, AT_MOST_ONCE);
    // Single wait is sufficient for the synchronous test client
    // (previously this slept twice back to back for no reason).
    Thread.sleep(PUBLISH_WAIT_MS);
    testRunner.run(1, false, false);
    final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_MESSAGE);
    // Expected value first, matching JUnit 5 convention and the sibling tests.
    assertEquals(1, flowFiles.size());
    assertEquals("{\"name\":\"Apache NiFi\"}\\n"
        + THIS_IS_NOT_JSON + "\\n"
        + "{\"name\":\"Apache NiFi\"}",
        new String(flowFiles.getFirst().toByteArray(), StandardCharsets.UTF_8));
    // No record parsing takes place, so the malformed payload is not a failure.
    final List<MockFlowFile> badFlowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_PARSE_FAILURE);
    assertEquals(0, badFlowFiles.size());
}
@Test
// Same as the added-fields record test, but with attribute-to-field injection
// disabled: records pass through with only their original JSON fields.
public void testConsumeRecordsWithoutAddedFields() throws Exception {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.setProperty(ConsumeMQTT.RECORD_READER, createJsonRecordSetReaderService(testRunner));
testRunner.setProperty(ConsumeMQTT.RECORD_WRITER, createJsonRecordSetWriterService(testRunner));
testRunner.setProperty(ConsumeMQTT.ADD_ATTRIBUTES_AS_FIELDS, "false");
testRunner.assertValid();
final ConsumeMQTT consumeMQTT = (ConsumeMQTT) testRunner.getProcessor();
consumeMQTT.onScheduled(testRunner.getProcessContext());
reconnect(consumeMQTT, testRunner.getProcessContext());
Thread.sleep(PUBLISH_WAIT_MS);
assertTrue(isConnected(consumeMQTT));
// Two valid JSON payloads with one invalid payload in between.
publishMessage(JSON_PAYLOAD, AT_LEAST_ONCE);
publishMessage(THIS_IS_NOT_JSON, AT_LEAST_ONCE);
publishMessage(JSON_PAYLOAD, AT_LEAST_ONCE);
Thread.sleep(PUBLISH_WAIT_MS);
testRunner.run(1, false, false);
final List<MockFlowFile> flowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_MESSAGE);
assertEquals(1, flowFiles.size());
// No _topic/_qos/... metadata fields appear in the output records.
assertEquals("[{\"name\":\"Apache NiFi\"},{\"name\":\"Apache NiFi\"}]", new String(flowFiles.getFirst().toByteArray()));
final List<MockFlowFile> badFlowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_PARSE_FAILURE);
assertEquals(1, badFlowFiles.size());
assertEquals(THIS_IS_NOT_JSON, new String(badFlowFiles.getFirst().toByteArray()));
}
@Test
// Record-oriented consumption where every payload is unparseable: the raw
// payload must be routed to REL_PARSE_FAILURE.
public void testConsumeRecordsOnlyBadData() throws Exception {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.setProperty(ConsumeMQTT.RECORD_READER, createJsonRecordSetReaderService(testRunner));
testRunner.setProperty(ConsumeMQTT.RECORD_WRITER, createJsonRecordSetWriterService(testRunner));
testRunner.setProperty(ConsumeMQTT.ADD_ATTRIBUTES_AS_FIELDS, "false");
testRunner.assertValid();
final ConsumeMQTT consumeMQTT = (ConsumeMQTT) testRunner.getProcessor();
consumeMQTT.onScheduled(testRunner.getProcessContext());
reconnect(consumeMQTT, testRunner.getProcessContext());
Thread.sleep(PUBLISH_WAIT_MS);
assertTrue(isConnected(consumeMQTT));
publishMessage(THIS_IS_NOT_JSON, EXACTLY_ONCE);
Thread.sleep(PUBLISH_WAIT_MS);
testRunner.run(1, false, false);
final List<MockFlowFile> badFlowFiles = testRunner.getFlowFilesForRelationship(ConsumeMQTT.REL_PARSE_FAILURE);
assertEquals(1, badFlowFiles.size());
assertEquals(THIS_IS_NOT_JSON, new String(badFlowFiles.getFirst().toByteArray()));
}
@Test
// Smoke test: an ssl:// broker URI supplied via expression language together
// with a mock SSL context service lets onScheduled complete without throwing.
// Uses the real client factory (no MqttTestClient), so no broker interaction
// beyond scheduling is exercised.
public void testSslContextService() throws InitializationException {
testRunner = initializeTestRunner();
testRunner.setEnvironmentVariableValue("brokerURI", "ssl://localhost:8883");
testRunner.setProperty(ConsumeMQTT.PROP_BROKER_URI, "${brokerURI}");
final String identifier = addSSLContextService(testRunner);
testRunner.setProperty(ConsumeMQTT.PROP_SSL_CONTEXT_SERVICE, identifier);
final ConsumeMQTT processor = (ConsumeMQTT) testRunner.getProcessor();
// Passes if no exception is thrown while resolving the SSL configuration.
processor.onScheduled(testRunner.getProcessContext());
}
@Test
// Verify at-least-once semantics inside the processor: if the session commit
// fails, the message must remain in the internal queue so it can be retried,
// rather than being lost.
public void testMessageNotConsumedOnCommitFail() throws NoSuchFieldException, IllegalAccessException {
mqttTestClient = new MqttTestClient(MqttTestClient.ConnectType.Subscriber);
testRunner = initializeTestRunner(mqttTestClient);
testRunner.run(1, false);
final ConsumeMQTT processor = (ConsumeMQTT) testRunner.getProcessor();
final ReceivedMqttMessage mock = mock(ReceivedMqttMessage.class);
when(mock.getPayload()).thenReturn(new byte[0]);
when(mock.getTopic()).thenReturn(TOPIC_NAME);
// Place the mock message directly on the internal queue (via reflection).
final BlockingQueue<ReceivedMqttMessage> mqttQueue = getMqttQueue(processor);
mqttQueue.add(mock);
final ProcessSession session = testRunner.getProcessSessionFactory().createSession();
// Wrap the session in a dynamic proxy that delegates everything except
// commitAsync, which is made to fail. transferQueue is invoked reflectively,
// so the RuntimeException surfaces as an InvocationTargetException.
assertThrows(InvocationTargetException.class, () -> transferQueue(processor,
(ProcessSession) Proxy.newProxyInstance(getClass().getClassLoader(), new Class[]{ProcessSession.class}, (proxy, method, args) -> {
if (method.getName().equals("commitAsync")) {
throw new RuntimeException();
} else {
return method.invoke(session, args);
}
})));
// The failed commit must not have removed the message from the queue.
assertTrue(mqttQueue.contains(mock));
}
@Test
void addTopicAttributesWithMultipleTopicSegments() {
    // A three-segment topic yields the full topic plus one attribute per segment.
    final Map<String, String> attrs = new HashMap<>();
    new ConsumeMQTT().addTopicAttributes(attrs, "home/livingroom/temperature");
    assertEquals(4, attrs.size(), "Expected 4 attributes (1 for full topic + 3 for segments)");
    assertEquals("home/livingroom/temperature", attrs.get("mqtt.topic"), "Full topic should be present");
    assertEquals("home", attrs.get("mqtt.topic.segment.0"));
    assertEquals("livingroom", attrs.get("mqtt.topic.segment.1"));
    assertEquals("temperature", attrs.get("mqtt.topic.segment.2"));
    assertNull(attrs.get("mqtt.topic.segment.3"), "No further segments expected");
}
@Test
void addTopicAttributesWithLeadingSlashInTopic() {
    // A leading slash produces an empty first segment.
    final Map<String, String> attrs = new HashMap<>();
    new ConsumeMQTT().addTopicAttributes(attrs, "/sensors/light");
    assertEquals(4, attrs.size(), "Expected 4 attributes (1 for full topic + 3 for segments)");
    assertEquals("/sensors/light", attrs.get("mqtt.topic"), "Full topic should be present");
    assertEquals("", attrs.get("mqtt.topic.segment.0"), "Segment 0 should be empty for leading slash");
    assertEquals("sensors", attrs.get("mqtt.topic.segment.1"));
    assertEquals("light", attrs.get("mqtt.topic.segment.2"));
    assertNull(attrs.get("mqtt.topic.segment.3"), "No further segments expected");
}
@Test
void addTopicAttributesWithTrailingSlashInTopicName() {
    // A trailing slash produces an empty last segment.
    final Map<String, String> attrs = new HashMap<>();
    new ConsumeMQTT().addTopicAttributes(attrs, "data/device/");
    assertEquals(4, attrs.size(), "Expected 4 attributes (1 for full topic + 3 for segments)");
    assertEquals("data/device/", attrs.get("mqtt.topic"), "Full topic should be present");
    assertEquals("data", attrs.get("mqtt.topic.segment.0"));
    assertEquals("device", attrs.get("mqtt.topic.segment.1"));
    assertEquals("", attrs.get("mqtt.topic.segment.2"), "Segment 2 should be empty for trailing slash");
    assertNull(attrs.get("mqtt.topic.segment.3"), "No further segments expected");
}
@Test
void addTopicAttributesWithSingleSlashAsTopic() {
    // A lone slash splits into two empty segments.
    final Map<String, String> attrs = new HashMap<>();
    new ConsumeMQTT().addTopicAttributes(attrs, "/");
    assertEquals(3, attrs.size(), "Expected 3 attributes (1 for full topic + 2 for segments)");
    assertEquals("/", attrs.get("mqtt.topic"), "Full topic should be present");
    assertEquals("", attrs.get("mqtt.topic.segment.0"));
    assertEquals("", attrs.get("mqtt.topic.segment.1"));
    assertNull(attrs.get("mqtt.topic.segment.2"), "No further segments expected");
}
@Test
void addTopicAttributes_consecutiveSlashesTopic_addsCorrectly() {
    // Consecutive slashes produce an empty segment between the two names.
    final Map<String, String> attrs = new HashMap<>();
    new ConsumeMQTT().addTopicAttributes(attrs, "status//alerts");
    assertEquals(4, attrs.size(), "Expected 4 attributes (1 for full topic + 3 for segments)");
    assertEquals("status//alerts", attrs.get("mqtt.topic"), "Full topic should be present");
    assertEquals("status", attrs.get("mqtt.topic.segment.0"));
    assertEquals("", attrs.get("mqtt.topic.segment.1"));
    assertEquals("alerts", attrs.get("mqtt.topic.segment.2"));
    assertNull(attrs.get("mqtt.topic.segment.3"), "No further segments expected");
}
@Test
void addTopicAttributesWithEmptyTopic() {
    // An empty topic still records the full-topic attribute but no segments.
    final String topic = "";
    final Map<String, String> attrs = new HashMap<>();
    new ConsumeMQTT().addTopicAttributes(attrs, topic);
    assertEquals(1, attrs.size(), "Expected only 1 attribute (for full topic)");
    assertEquals(topic, attrs.get("mqtt.topic"), "Full topic should be present and empty");
    assertNull(attrs.get("mqtt.topic.segment.0"), "No segments should be added for empty topic");
}
@Test
void addTopicAttributesWithNullTopic() {
    // A null topic is tolerated: the full-topic attribute is stored as null
    // and no segment attributes are created.
    final String topic = null;
    final Map<String, String> attrs = new HashMap<>();
    new ConsumeMQTT().addTopicAttributes(attrs, topic);
    assertEquals(1, attrs.size(), "Expected only 1 attribute (for full topic)");
    assertNull(attrs.get("mqtt.topic"), "Full topic should be null");
    assertNull(attrs.get("mqtt.topic.segment.0"), "No segments should be added for null topic");
}
@Test
void addTopicAttributesWithTopicWithoutSlashes() {
    // A slash-free topic is its own single segment.
    final Map<String, String> attrs = new HashMap<>();
    new ConsumeMQTT().addTopicAttributes(attrs, "sensors");
    assertEquals(2, attrs.size(), "Expected 2 attributes (1 for full topic + 1 for segment)");
    assertEquals("sensors", attrs.get("mqtt.topic"), "Full topic should be present");
    assertEquals("sensors", attrs.get("mqtt.topic.segment.0"));
    assertNull(attrs.get("mqtt.topic.segment.1"), "No further segments expected");
}
// Creates a runner around a real ConsumeMQTT (its own client factory).
// Guarded so it cannot be mixed up with the MqttTestClient-backed variant.
private TestRunner initializeTestRunner() {
    if (mqttTestClient != null) {
        throw new IllegalStateException("mqttTestClient should be null, using ConsumeMQTT's default client!");
    }
    final TestRunner runner = TestRunners.newTestRunner(ConsumeMQTT.class);
    setCommonProperties(runner);
    return runner;
}
// Creates a runner around a ConsumeMQTT whose client factory is overridden
// to return the supplied in-memory test client instead of a real connection.
private TestRunner initializeTestRunner(MqttTestClient mqttTestClient) {
    final ConsumeMQTT processorWithTestClient = new ConsumeMQTT() {
        @Override
        protected MqttClient createMqttClient() {
            return mqttTestClient;
        }
    };
    final TestRunner runner = TestRunners.newTestRunner(processorWithTestClient);
    setCommonProperties(runner);
    return runner;
}
// Applies the broker/client/topic/queue-size configuration shared by all tests.
private void setCommonProperties(TestRunner testRunner) {
testRunner.setProperty(ConsumeMQTT.PROP_BROKER_URI, BROKER_URI);
testRunner.setProperty(ConsumeMQTT.PROP_CLIENTID, CLIENT_ID);
testRunner.setProperty(ConsumeMQTT.PROP_TOPIC_FILTER, TOPIC_NAME);
testRunner.setProperty(ConsumeMQTT.PROP_MAX_QUEUE_SIZE, INTERNAL_QUEUE_SIZE);
}
// Reflectively reads the processor's private mqttClient field to check
// whether the underlying client reports an active connection.
private static boolean isConnected(AbstractMQTTProcessor processor) throws NoSuchFieldException, IllegalAccessException {
    final Field clientField = AbstractMQTTProcessor.class.getDeclaredField("mqttClient");
    clientField.setAccessible(true);
    return ((MqttClient) clientField.get(processor)).isConnected();
}
// Invokes the processor's private initializeClient(ProcessContext) method via
// reflection to force a (re)connection outside the normal trigger path.
public static void reconnect(ConsumeMQTT processor, ProcessContext context) throws IllegalAccessException, NoSuchMethodException, InvocationTargetException {
    final Method initializeClient = ConsumeMQTT.class.getDeclaredMethod("initializeClient", ProcessContext.class);
    initializeClient.setAccessible(true);
    initializeClient.invoke(processor, context);
}
@SuppressWarnings("unchecked")
// Exposes the processor's private internal message queue for direct
// inspection and injection in tests.
public static BlockingQueue<ReceivedMqttMessage> getMqttQueue(ConsumeMQTT consumeMQTT) throws IllegalAccessException, NoSuchFieldException {
    final Field queueField = ConsumeMQTT.class.getDeclaredField("mqttQueue");
    queueField.setAccessible(true);
    final Object queue = queueField.get(consumeMQTT);
    return (BlockingQueue<ReceivedMqttMessage>) queue;
}
// Reflectively invokes the private transferQueue(ProcessSession) method,
// which drains the internal queue into the supplied session.
public static void transferQueue(ConsumeMQTT consumeMQTT, ProcessSession session) throws NoSuchMethodException, InvocationTargetException, IllegalAccessException {
    final Method transferMethod = ConsumeMQTT.class.getDeclaredMethod("transferQueue", ProcessSession.class);
    transferMethod.setAccessible(true);
    transferMethod.invoke(consumeMQTT, session);
}
// Asserts the runner recorded exactly `count` provenance events and that the
// first one (when any exist) is a RECEIVE event.
private void assertProvenanceEvents(int count) {
    final List<ProvenanceEventRecord> events = testRunner.getProvenanceEvents();
    assertNotNull(events);
    assertEquals(count, events.size());
    if (count > 0) {
        assertEquals(ProvenanceEventType.RECEIVE, events.getFirst().getEventType());
    }
}
// Publishes a UTF-8 payload to the shared test topic at the given QoS;
// the retained flag is always false in these tests.
private void publishMessage(final String payload, final int qos) {
    final byte[] body = payload.getBytes(StandardCharsets.UTF_8);
    mqttTestClient.publish(TOPIC_NAME, new StandardMqttMessage(body, qos, false));
}
// Registers and enables a mock SSLContextService on the runner so that
// ssl://-style broker URIs pass validation; returns the service identifier.
private static String addSSLContextService(TestRunner testRunner) throws InitializationException {
    final String serviceId = SSLContextService.class.getSimpleName();
    final SSLContextService sslContextService = mock(SSLContextService.class);
    when(sslContextService.getIdentifier()).thenReturn(serviceId);
    testRunner.addControllerService(serviceId, sslContextService);
    testRunner.enableControllerService(sslContextService);
    return serviceId;
}
}
|
apache/pig | 37,430 | src/org/apache/pig/backend/hadoop/executionengine/tez/plan/optimizer/UnionOptimizer.java | /**
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.pig.backend.hadoop.executionengine.tez.plan.optimizer;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.Collections;
import java.util.HashSet;
import java.util.LinkedList;
import java.util.List;
import java.util.Map.Entry;
import java.util.Set;
import org.apache.commons.logging.Log;
import org.apache.commons.logging.LogFactory;
import org.apache.pig.PigConfiguration;
import org.apache.pig.StoreFunc;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.PhysicalOperator;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.expressionOperators.POUserFunc;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.plans.PhysicalPlan;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POSplit;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.relationalOperators.POStore;
import org.apache.pig.backend.hadoop.executionengine.physicalLayer.util.PlanHelper;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezEdgeDescriptor;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezOpPlanVisitor;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezOperPlan;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezOperator;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezOperator.OPER_FEATURE;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.TezOperator.VertexGroupInfo;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.operator.POStoreTez;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.operator.POValueOutputTez;
import org.apache.pig.backend.hadoop.executionengine.tez.plan.udf.ReadScalarsTez;
import org.apache.pig.backend.hadoop.executionengine.tez.runtime.HashValuePartitioner;
import org.apache.pig.backend.hadoop.executionengine.tez.runtime.TezInput;
import org.apache.pig.backend.hadoop.executionengine.tez.runtime.TezOutput;
import org.apache.pig.backend.hadoop.executionengine.tez.util.TezCompilerUtil;
import org.apache.pig.backend.hadoop.hbase.HBaseStorage;
import org.apache.pig.builtin.AvroStorage;
import org.apache.pig.builtin.JsonStorage;
import org.apache.pig.builtin.OrcStorage;
import org.apache.pig.builtin.PigStorage;
import org.apache.pig.builtin.mock.Storage;
import org.apache.pig.impl.plan.OperatorKey;
import org.apache.pig.impl.plan.PlanException;
import org.apache.pig.impl.plan.ReverseDependencyOrderWalker;
import org.apache.pig.impl.plan.VisitorException;
import org.apache.tez.dag.api.EdgeProperty.DataMovementType;
import org.apache.tez.runtime.library.input.UnorderedKVInput;
import org.apache.tez.runtime.library.output.UnorderedPartitionedKVOutput;
/**
* Optimizes union by removing the intermediate union vertex and making the
* successor get input from the predecessor vertices directly using VertexGroup.
* This should be run after MultiQueryOptimizer so that it handles cases like
* union followed by split and then store.
*
* For eg:
* 1) Union followed by store
* Vertex 1 (Load), Vertex 2 (Load) -> Vertex 3 (Union + Store) will be optimized to
* Vertex 1 (Load + Store), Vertex 2 (Load + Store). Both the vertices will be writing output
* to same store location directly which is supported by Tez.
* 2) Union followed by groupby
* Vertex 1 (Load), Vertex 2 (Load) -> Vertex 3 (Union + POLocalRearrange) -> Vertex 4 (Group by)
* will be optimized to Vertex 1 (Load + POLR), Vertex 2 (Load + POLR) -> Vertex 4 (Group by)
*
*/
public class UnionOptimizer extends TezOpPlanVisitor {
private static final Log LOG = LogFactory.getLog(UnionOptimizer.class);
// The Tez plan being optimized; union vertices are removed from it in place.
private TezOperPlan tezPlan;
// StoreFunc class names explicitly configured as safe for multiple vertices
// writing to the same output location.
private List<String> supportedStoreFuncs;
// StoreFunc class names explicitly configured as unsafe; checked before (and
// overriding) both the supported list and the StoreFunc's own claim.
private List<String> unsupportedStoreFuncs;
/**
 * Creates the optimizer.
 *
 * @param plan the Tez operator plan to optimize in place
 * @param supportedStoreFuncs StoreFunc class names allowed to write in parallel
 * @param unsupportedStoreFuncs StoreFunc class names disallowed, overriding support claims
 */
public UnionOptimizer(TezOperPlan plan, List<String> supportedStoreFuncs, List<String> unsupportedStoreFuncs) {
    super(plan, new ReverseDependencyOrderWalker<TezOperator, TezOperPlan>(plan));
    this.tezPlan = plan;
    this.supportedStoreFuncs = supportedStoreFuncs;
    this.unsupportedStoreFuncs = unsupportedStoreFuncs;
}
/**
 * Returns whether the given union operator is structurally eligible for the
 * vertex-group optimization, based solely on the operator itself.
 */
public static boolean isOptimizable(TezOperator tezOp) throws VisitorException {
// A single-task limit vertex must stay a single vertex to produce one result.
if((tezOp.isLimit() || tezOp.isLimitAfterSort()) && tezOp.getRequestedParallelism() == 1) {
return false;
}
// If user has specified a PARALLEL clause with the union operator
// turn off union optimization
if (tezOp.getRequestedParallelism() != -1) {
return false;
}
// Two vertices separately ranking with 1 to n and writing to output directly
// will make each rank repeated twice which is wrong. Rank always needs to be
// done from single vertex to have the counting correct.
if (tezOp.isRankCounter()) {
return false;
}
return true;
}
/**
 * Returns whether every store in the union vertex uses a StoreFunc that can
 * safely have multiple vertices writing to the same output location, which is
 * required because the optimization makes each predecessor write directly.
 */
public static boolean isOptimizableStoreFunc(TezOperator tezOp,
List<String> supportedStoreFuncs, List<String> unsupportedStoreFuncs)
throws VisitorException {
List<POStoreTez> stores = PlanHelper.getPhysicalOperators(tezOp.plan, POStoreTez.class);
// If any store function does not support parallel writes, then we cannot use this optimization
for (POStoreTez store : stores) {
String name = store.getStoreFunc().getClass().getName();
Boolean supportsParallelWriteToStoreLocation = store.getStoreFunc().supportsParallelWriteToStoreLocation();
// We process exclusions first, then inclusions. This way, a user can explicitly disable parallel stores
// for a UDF that claims to support it, but cannot enable parallel stores for a UDF that claims not to.
//
// Logical flow:
// 1) If the store function is explicitly listed as unsupported, then return false
// 2) If the store function specifies itself as unsupported, then return false
// 3) If the store function specifies itself as supported, then continue (true case)
// 4) If the store function is explicitly listed as supported, then continue (true case)
// 5) Otherwise, return false
if (unsupportedStoreFuncs != null && unsupportedStoreFuncs.contains(name)) {
LOG.warn(name + " does not support union optimization."
+ " Disabling it. There will be some performance degradation.");
return false;
}
// A non-null value is the StoreFunc's own claim; null means "unknown",
// falling through to the configured supported list.
if (supportsParallelWriteToStoreLocation != null) {
if (supportsParallelWriteToStoreLocation) {
continue;
} else {
LOG.warn(name + " does not support union optimization."
+ " Disabling it. There will be some performance degradation.");
return false;
}
}
if (supportedStoreFuncs != null && supportedStoreFuncs.contains(name)) {
continue;
}
LOG.warn(PigConfiguration.PIG_TEZ_OPT_UNION_SUPPORTED_STOREFUNCS
+ " does not contain " + name
+ " and so disabling union optimization. There will be some performance degradation. "
+ "If your storefunc does not hardcode part file names and can work with multiple vertices writing to the output location,"
+ " run pig with -D"
+ PigConfiguration.PIG_TEZ_OPT_UNION_SUPPORTED_STOREFUNCS
+ "=<Comma separated list of fully qualified StoreFunc class names> to enable the optimization. Refer PIG-4691");
return false;
}
return true;
}
/**
 * Rewrites an eligible union vertex out of the plan. The union's physical
 * plan is cloned into every union member. If all members are sub-plans of a
 * single Split predecessor, the Split vertex is wired directly to the
 * union's successors; otherwise one Tez VertexGroup is created per store
 * and per logical output of the union, and the members are connected to the
 * successors through those vertex groups.
 */
@Override
public void visitTezOp(TezOperator tezOp) throws VisitorException {
    if (!tezOp.isUnion()) {
        return;
    }
    if (!isOptimizable(tezOp)) {
        return;
    }
    TezOperator unionOp = tezOp;
    String scope = unionOp.getOperatorKey().scope;
    PhysicalPlan unionOpPlan = unionOp.plan;
    // Exactly one unique member means every union input is a sub-plan of the same Split
    Set<OperatorKey> uniqueUnionMembers = new HashSet<OperatorKey>(unionOp.getUnionMembers());
    List<TezOperator> predecessors = new ArrayList<TezOperator>(tezPlan.getPredecessors(unionOp));
    // Snapshot successors since the plan is mutated below; null when union is a sink
    List<TezOperator> successors = tezPlan.getSuccessors(unionOp) == null ? null
            : new ArrayList<TezOperator>(tezPlan.getSuccessors(unionOp));
    if (uniqueUnionMembers.size() != 1) {
        if (!isOptimizableStoreFunc(tezOp, supportedStoreFuncs, unsupportedStoreFuncs)) {
            return;
        }
        if (successors != null) {
            for (TezOperator succ : successors) {
                for (TezOperator pred : predecessors) {
                    if (succ.inEdges.containsKey(pred.getOperatorKey())) {
                        // Stop here, we cannot convert the node into vertex group
                        // Otherwise, we will end up with a parallel edge between pred
                        // and succ
                        return;
                    }
                }
            }
        }
        // TODO: PIG-3856 Handle replicated join and skewed join sample.
        // Replicate join small table/skewed join sample that was broadcast to union vertex
        // now needs to be broadcast to all the union predecessors. How do we do that??
        // Wait for shared edge and do it or write multiple times??
        // For now don't optimize except in the case of Split where we need to write only once
        if (predecessors.size() > unionOp.getUnionMembers().size()) {
            return;
        }
    }
    if (uniqueUnionMembers.size() == 1) {
        // We actually don't need VertexGroup in this case. The multiple
        // sub-plans of Split can write to same MROutput or the Tez LogicalOutput
        OperatorKey splitPredKey = uniqueUnionMembers.iterator().next();
        TezOperator splitPredOp = tezPlan.getOperator(splitPredKey);
        PhysicalPlan splitPredPlan = splitPredOp.plan;
        if (splitPredPlan.getLeaves().get(0) instanceof POSplit) { //It has to be. But check anyways
            for( TezOperator op : predecessors ) {
                if( !op.getOperatorKey().equals(splitPredKey)) {
                    Set<TezOperator> allNonMemberPredecessorsAncestors = new HashSet<TezOperator>();
                    TezCompilerUtil.addAllPredecessors(tezPlan, op, allNonMemberPredecessorsAncestors);
                    // If any of the nonMemberPredecessor's ancestors(recursive predecessor)
                    // is from the single unionmember, then we stop the merge effort to avoid creating
                    // an illegal loop.
                    if( allNonMemberPredecessorsAncestors.contains(splitPredOp) ) {
                        return;
                    }
                }
            }
            try {
                connectUnionNonMemberPredecessorsToSplit(unionOp, splitPredOp, predecessors);
                // Remove POShuffledValueInputTez from union plan root
                unionOpPlan.remove(unionOpPlan.getRoots().get(0));
                // Clone union plan into split subplans, once per Split sub-plan feeding the union
                for (int i=0; i < Collections.frequency(unionOp.getUnionMembers(), splitPredKey); i++ ) {
                    cloneAndMergeUnionPlan(unionOp, splitPredOp);
                }
                copyOperatorProperties(splitPredOp, unionOp);
                tezPlan.disconnect(splitPredOp, unionOp);
                connectSplitOpToUnionSuccessors(unionOp, splitPredOp, successors);
            } catch (PlanException e) {
                throw new VisitorException(e);
            }
            //Remove union operator from the plan
            tezPlan.remove(unionOp);
            return;
        } else {
            throw new VisitorException("Expected POSplit but found " + splitPredPlan.getLeaves().get(0));
        }
    }
    // Create vertex group operator for each store. Union followed by Split
    // followed by Store could have multiple stores
    List<POStoreTez> unionStoreOutputs = PlanHelper.getPhysicalOperators(unionOpPlan, POStoreTez.class);
    TezOperator[] storeVertexGroupOps = new TezOperator[unionStoreOutputs.size()];
    for (int i=0; i < storeVertexGroupOps.length; i++) {
        // Reuse an existing vertex group successor writing to the same file if there is one
        TezOperator existingVertexGroup = null;
        if (successors != null) {
            for (TezOperator succ : successors) {
                if (succ.isVertexGroup() && unionStoreOutputs.get(i).getSFile().equals(succ.getVertexGroupInfo().getSFile())) {
                    existingVertexGroup = succ;
                    break;
                }
            }
        }
        if (existingVertexGroup == null) {
            // In the case of union + split + union + store, the different stores in the Split
            // will be writing to same location after second union operator is optimized.
            // So while optimizing the first union, we should just make it write to one vertex group
            for (int j = 0; j < i; j++) {
                if (unionStoreOutputs.get(i).getSFile().equals(storeVertexGroupOps[j].getVertexGroupInfo().getSFile())) {
                    storeVertexGroupOps[i] = storeVertexGroupOps[j];
                    break;
                }
            }
            if (storeVertexGroupOps[i] != null) {
                continue;
            }
            storeVertexGroupOps[i] = new TezOperator(OperatorKey.genOpKey(scope));
            storeVertexGroupOps[i].setVertexGroupInfo(new VertexGroupInfo(unionStoreOutputs.get(i)));
            storeVertexGroupOps[i].getVertexGroupInfo().setSFile(unionStoreOutputs.get(i).getSFile());
            storeVertexGroupOps[i].setVertexGroupMembers(new ArrayList<OperatorKey>(unionOp.getUnionMembers()));
            tezPlan.add(storeVertexGroupOps[i]);
        } else {
            // Swap the union for its members inside the reused vertex group
            storeVertexGroupOps[i] = existingVertexGroup;
            existingVertexGroup.getVertexGroupMembers().remove(unionOp.getOperatorKey());
            existingVertexGroup.getVertexGroupMembers().addAll(unionOp.getUnionMembers());
            existingVertexGroup.getVertexGroupInfo().removeInput(unionOp.getOperatorKey());
        }
    }
    // Create vertex group operator for each output. Case of split, orderby,
    // skewed join, rank, etc will have multiple outputs
    List<TezOutput> unionOutputs = PlanHelper.getPhysicalOperators(unionOpPlan, TezOutput.class);
    // One TezOutput can write to multiple LogicalOutputs (POCounterTez, POValueOutputTez, etc)
    List<String> unionOutputKeys = new ArrayList<String>();
    for (TezOutput output : unionOutputs) {
        if (output instanceof POStoreTez) {
            continue;
        }
        for (String key : output.getTezOutputs()) {
            unionOutputKeys.add(key);
        }
    }
    TezOperator[] outputVertexGroupOps = new TezOperator[unionOutputKeys.size()];
    String[] newOutputKeys = new String[unionOutputKeys.size()];
    for (int i=0; i < outputVertexGroupOps.length; i++) {
        // Reuse an existing vertex group successor for the same output key if there is one
        TezOperator existingVertexGroup = null;
        if (successors != null) {
            for (TezOperator succ : successors) {
                if (succ.isVertexGroup()
                        && unionOutputKeys.get(i).equals(succ.getVertexGroupInfo().getOutput()) ) {
                    existingVertexGroup = succ;
                    break;
                }
            }
        }
        if (existingVertexGroup == null) {
            for (int j = 0; j < i; j++) {
                if (unionOutputKeys.get(i).equals(unionOutputKeys.get(j))) {
                    outputVertexGroupOps[i] = outputVertexGroupOps[j];
                    break;
                }
            }
            if (outputVertexGroupOps[i] != null) {
                continue;
            }
            outputVertexGroupOps[i] = new TezOperator(OperatorKey.genOpKey(scope));
            outputVertexGroupOps[i].setVertexGroupInfo(new VertexGroupInfo());
            outputVertexGroupOps[i].getVertexGroupInfo().setOutput(unionOutputKeys.get(i));
            outputVertexGroupOps[i].setVertexGroupMembers(new ArrayList<OperatorKey>(unionOp.getUnionMembers()));
        } else {
            outputVertexGroupOps[i] = existingVertexGroup;
            existingVertexGroup.getVertexGroupMembers().remove(unionOp.getOperatorKey());
            existingVertexGroup.getVertexGroupMembers().addAll(unionOp.getUnionMembers());
            existingVertexGroup.getVertexGroupInfo().removeInput(unionOp.getOperatorKey());
        }
        newOutputKeys[i] = outputVertexGroupOps[i].getOperatorKey().toString();
        tezPlan.add(outputVertexGroupOps[i]);
    }
    // Change plan from Predecessors -> Union -> Successor(s) to
    // Predecessors -> Vertex Group(s) -> Successor(s)
    try {
        // Remove POShuffledValueInputTez from union plan root
        unionOpPlan.remove(unionOpPlan.getRoots().get(0));
        for (OperatorKey predKey : unionOp.getUnionMembers()) {
            TezOperator pred = tezPlan.getOperator(predKey);
            PhysicalPlan clonePlan = cloneAndMergeUnionPlan(unionOp, pred);
            connectPredecessorsToVertexGroups(unionOp, pred, clonePlan,
                    storeVertexGroupOps, outputVertexGroupOps);
        }
        connectVertexGroupsToSuccessors(unionOp, successors,
                unionOutputKeys, outputVertexGroupOps);
        replaceSuccessorInputsAndDisconnect(unionOp, successors, unionOutputKeys, newOutputKeys);
        //Remove union operator from the plan
        tezPlan.remove(unionOp);
    } catch (VisitorException e) {
        throw e;
    } catch (Exception e) {
        throw new VisitorException(e);
    }
}
/**
 * Connect the predecessors of the union which are not members of the union
 * (usually FRJoin replicated table or SkewedJoin sample) to the Split op
 * which is the only member of the union. Disconnect those predecessors from the union.
 *
 * Replace the output keys of those predecessors with the split operator
 * key instead of the union operator key.
 *
 * @param unionOp Union operator
 * @param splitPredOp Split operator which is the only member of the union and its predecessor
 * @param unionPredecessors Predecessors of the union including the split operator
 * @throws PlanException
 * @throws VisitorException
 */
private void connectUnionNonMemberPredecessorsToSplit(TezOperator unionOp,
        TezOperator splitPredOp,
        List<TezOperator> unionPredecessors) throws PlanException, VisitorException {
    String unionOpKey = unionOp.getOperatorKey().toString();
    OperatorKey splitPredKey = splitPredOp.getOperatorKey();
    for (TezOperator pred : unionPredecessors) {
        if (!pred.getOperatorKey().equals(splitPredKey)) { //Skip splitPredOp which is also a predecessor
            // Get actual predecessors if predecessor is a vertex group
            TezOperator predVertexGroup = null;
            List<TezOperator> actualPreds = new ArrayList<TezOperator>();
            if (pred.isVertexGroup()) {
                predVertexGroup = pred;
                for (OperatorKey opKey : pred.getVertexGroupMembers()) {
                    // There should not be multiple levels of vertex group. So no recursion required.
                    actualPreds.add(tezPlan.getOperator(opKey));
                }
                // Reconnect the vertex group itself; its members only need key/edge fixups below
                tezPlan.disconnect(predVertexGroup, unionOp);
                tezPlan.connect(predVertexGroup, splitPredOp);
            } else {
                actualPreds.add(pred);
            }
            for (TezOperator actualPred : actualPreds) {
                // Point the predecessor's output at the Split instead of the union,
                // and move its out edge (and the Split's in edge) accordingly
                TezCompilerUtil.replaceOutput(actualPred, unionOpKey, splitPredKey.toString());
                TezEdgeDescriptor edge = actualPred.outEdges.remove(unionOp.getOperatorKey());
                if (edge == null) {
                    throw new VisitorException("Edge description is empty");
                }
                actualPred.outEdges.put(splitPredKey, edge);
                splitPredOp.inEdges.put(actualPred.getOperatorKey(), edge);
                if (predVertexGroup == null) {
                    // Disconnect FRJoin table/SkewedJoin sample edge to
                    // union op and connect to POSplit
                    tezPlan.disconnect(actualPred, unionOp);
                    tezPlan.connect(actualPred, splitPredOp);
                }
            }
        }
    }
}
/**
 * Connect the split operator to the successors of the union operators and update the edges.
 * Also change the inputs of the successor from the union operator to the split operator.
 *
 * @param unionOp Union operator
 * @param splitPredOp Split operator which is the only member of the union
 * @param successors Successors of the union operator (may be null when union is a sink)
 * @throws PlanException
 * @throws VisitorException
 */
private void connectSplitOpToUnionSuccessors(TezOperator unionOp,
        TezOperator splitPredOp, List<TezOperator> successors)
        throws PlanException, VisitorException {
    String unionOpKey = unionOp.getOperatorKey().toString();
    String splitPredOpKey = splitPredOp.getOperatorKey().toString();
    List<TezOperator> splitSuccessors = tezPlan.getSuccessors(splitPredOp);
    if (successors != null) {
        for (TezOperator succ : successors) {
            TezOperator successorVertexGroup = null;
            boolean removeSuccessorVertexGroup = false;
            List<TezOperator> actualSuccs = new ArrayList<TezOperator>();
            if (succ.isVertexGroup()) {
                successorVertexGroup = succ;
                if (tezPlan.getSuccessors(successorVertexGroup) != null) {
                    // There should not be multiple levels of vertex group. So no recursion required.
                    actualSuccs.addAll(tezPlan.getSuccessors(successorVertexGroup));
                }
                // Replace every occurrence of the union with the Split in the member list
                int index = succ.getVertexGroupMembers().indexOf(unionOp.getOperatorKey());
                while (index > -1) {
                    succ.getVertexGroupMembers().set(index, splitPredOp.getOperatorKey());
                    index = succ.getVertexGroupMembers().indexOf(unionOp.getOperatorKey());
                }
                // Store vertex group
                POStore store = successorVertexGroup.getVertexGroupInfo().getStore();
                if (store != null) {
                    //Clone changes the operator keys
                    List<POStoreTez> storeOutputs = PlanHelper.getPhysicalOperators(splitPredOp.plan, POStoreTez.class);
                    for (POStoreTez storeOut : storeOutputs) {
                        if (storeOut.getOutputKey().equals(store.getOperatorKey().toString())) {
                            splitPredOp.addVertexGroupStore(storeOut.getOperatorKey(), successorVertexGroup.getOperatorKey());
                        }
                    }
                }
                tezPlan.disconnect(unionOp, successorVertexGroup);
                Set<OperatorKey> uniqueVertexGroupMembers = new HashSet<OperatorKey>(succ.getVertexGroupMembers());
                if (uniqueVertexGroupMembers.size() == 1) {
                    //Only splitPredOp is member of the vertex group. Get rid of the vertex group
                    removeSuccessorVertexGroup = true;
                } else {
                    // Avoid connecting multiple times in case of union + self join
                    if (splitSuccessors == null || !splitSuccessors.contains(successorVertexGroup)) {
                        tezPlan.connect(splitPredOp, successorVertexGroup);
                    }
                }
            } else {
                actualSuccs.add(succ);
            }
            // Store vertex group
            if (actualSuccs.isEmpty() && removeSuccessorVertexGroup) {
                // Vertex group with no downstream vertices (pure store group): drop it
                splitPredOp.removeVertexGroupStore(successorVertexGroup.getOperatorKey());
                tezPlan.remove(successorVertexGroup);
            }
            for (TezOperator actualSucc : actualSuccs) {
                // Rewire the successor's input from the union to the Split and move the edge
                TezCompilerUtil.replaceInput(actualSucc, unionOpKey, splitPredOpKey);
                TezEdgeDescriptor edge = actualSucc.inEdges.remove(unionOp.getOperatorKey());
                if (edge == null) {
                    throw new VisitorException("Edge description is empty");
                }
                actualSucc.inEdges.put(splitPredOp.getOperatorKey(), edge);
                splitPredOp.outEdges.put(actualSucc.getOperatorKey(), edge);
                if (successorVertexGroup == null || removeSuccessorVertexGroup) {
                    if (removeSuccessorVertexGroup) {
                        // Changes plan from SplitOp -> Union -> VertexGroup - > Successor
                        // to SplitOp -> Successor
                        tezPlan.disconnect(successorVertexGroup, actualSucc);
                        tezPlan.remove(successorVertexGroup);
                        TezCompilerUtil.replaceInput(actualSucc, successorVertexGroup.getOperatorKey().toString(), splitPredOpKey);
                    } else {
                        // Changes plan from SplitOp -> Union -> Successor
                        // to SplitOp -> Successor
                        tezPlan.disconnect(unionOp, actualSucc);
                    }
                    // Avoid connecting multiple times in case of union + self join
                    if (splitSuccessors == null || !splitSuccessors.contains(actualSucc)) {
                        tezPlan.connect(splitPredOp, actualSucc);
                    }
                }
            }
        }
    }
}
/**
 * Clone plan of union and merge it into the predecessor operator.
 *
 * @param unionOp Union operator
 * @param predOp Predecessor operator of union to which union plan should be merged to
 * @return the cloned union plan that was merged into the predecessor
 * @throws VisitorException if cloning or merging fails
 */
private PhysicalPlan cloneAndMergeUnionPlan(TezOperator unionOp, TezOperator predOp) throws VisitorException {
    try {
        PhysicalPlan predPlan = predOp.plan;
        PhysicalOperator predLeaf = predPlan.getLeaves().get(0);
        // A POSplit leaf means this member feeds the union through one of the
        // split's sub-plans; merge into that sub-plan instead
        if (predLeaf instanceof POSplit) {
            // Find the subPlan that connects to the union operator
            predPlan = getUnionPredPlanFromSplit(predPlan, unionOp.getOperatorKey().toString());
            predLeaf = predPlan.getLeaves().get(0);
        }
        PhysicalPlan clonePlan = unionOp.plan.clone();
        // Remove POValueOutputTez from predecessor leaf
        predPlan.remove(predLeaf);
        boolean isEmptyPlan = predPlan.isEmpty();
        if (!isEmptyPlan) {
            predLeaf = predPlan.getLeaves().get(0);
        }
        predPlan.merge(clonePlan);
        if (!isEmptyPlan) {
            // Attach the cloned union plan below the predecessor's (new) leaf
            predPlan.connect(predLeaf, clonePlan.getRoots().get(0));
        }
        return clonePlan;
    } catch (Exception e) {
        throw new VisitorException(e);
    }
}
/**
 * Connects the unionOp predecessor to the store vertex groups and the output vertex groups
 * and disconnects it from the unionOp.
 *
 * @param unionOp Union operator
 * @param pred Predecessor of union which will be made part of the vertex group
 * @param predClonedUnionPlan Cloned plan of the union merged to the predecessor
 * @param storeVertexGroupOps Store vertex groups to connect to
 * @param outputVertexGroupOps Tez LogicalOutput vertex groups to connect to
 * @throws VisitorException
 * @throws PlanException
 */
public void connectPredecessorsToVertexGroups(TezOperator unionOp,
        TezOperator pred, PhysicalPlan predClonedUnionPlan,
        TezOperator[] storeVertexGroupOps,
        TezOperator[] outputVertexGroupOps) throws VisitorException,PlanException {
    //Clone changes the operator keys
    List<POStoreTez> clonedUnionStoreOutputs = PlanHelper.getPhysicalOperators(predClonedUnionPlan, POStoreTez.class);
    // Connect predecessor to the storeVertexGroups
    int i = 0;
    for (TezOperator storeVertexGroup : storeVertexGroupOps) {
        // Skip connecting if they are already connected. Can happen in case of
        // union + split + union + store. Because of the split all the stores
        // will be writing to same location
        List<OperatorKey> inputs = storeVertexGroup.getVertexGroupInfo().getInputs();
        if (inputs == null || !inputs.contains(pred.getOperatorKey())) {
            tezPlan.connect(pred, storeVertexGroup);
        }
        storeVertexGroup.getVertexGroupInfo().addInput(pred.getOperatorKey());
        // Map the cloned store in the predecessor to its vertex group
        pred.addVertexGroupStore(clonedUnionStoreOutputs.get(i++).getOperatorKey(),
                storeVertexGroup.getOperatorKey());
    }
    for (TezOperator outputVertexGroup : outputVertexGroupOps) {
        // Same already-connected guard as for the store vertex groups above
        List<OperatorKey> inputs = outputVertexGroup.getVertexGroupInfo().getInputs();
        if (inputs == null || !inputs.contains(pred.getOperatorKey())) {
            tezPlan.connect(pred, outputVertexGroup);
        }
        outputVertexGroup.getVertexGroupInfo().addInput(pred.getOperatorKey());
    }
    copyOperatorProperties(pred, unionOp);
    tezPlan.disconnect(pred, unionOp);
}
/**
 * Connect vertexgroup operator to successor operator in the plan.
 *
 * Copy the output edge between union operator and successor to between
 * predecessors and successor. Predecessor output key and output edge points
 * to successor so that we have all the edge configuration, but they are
 * connected to the vertex group in the plan.
 *
 * @param unionOp Union operator
 * @param successors Successors of the union operator
 * @param unionOutputKeys Output keys of union
 * @param outputVertexGroupOps Tez LogicalOutput vertex groups corresponding to the output keys
 *
 * @throws PlanException
 */
private void connectVertexGroupsToSuccessors(TezOperator unionOp,
        List<TezOperator> successors, List<String> unionOutputKeys,
        TezOperator[] outputVertexGroupOps) throws PlanException {
    // Connect to outputVertexGroupOps
    for (Entry<OperatorKey, TezEdgeDescriptor> entry : unionOp.outEdges.entrySet()) {
        TezOperator succOp = tezPlan.getOperator(entry.getKey());
        TezEdgeDescriptor edge = entry.getValue();
        // Edge cannot be one to one as it will get input from two or
        // more union predecessors. Change it to SCATTER_GATHER
        if (edge.dataMovementType == DataMovementType.ONE_TO_ONE) {
            edge.dataMovementType = DataMovementType.SCATTER_GATHER;
            edge.partitionerClass = HashValuePartitioner.class;
            edge.outputClassName = UnorderedPartitionedKVOutput.class.getName();
            edge.inputClassName = UnorderedKVInput.class.getName();
        }
        TezOperator vertexGroupOp = outputVertexGroupOps[unionOutputKeys.indexOf(entry.getKey().toString())];
        for (OperatorKey predKey : unionOp.getUnionMembers()) {
            TezOperator pred = tezPlan.getOperator(predKey);
            // Keep the output edge directly to successor
            // Don't need to keep output edge for vertexgroup
            pred.outEdges.put(entry.getKey(), edge);
            succOp.inEdges.put(predKey, edge);
        }
        // Guard against duplicate connections (e.g. union feeding the same successor twice)
        if(!tezPlan.pathExists(vertexGroupOp, succOp)) {
            tezPlan.connect(vertexGroupOp, succOp);
        }
    }
}
/**
 * Rewrites the Tez input keys of each union successor so they point to the
 * new vertex group instead of the removed union vertex (including scalar
 * inputs read via ReadScalarsTez), then disconnects the successor from the
 * union in the plan.
 *
 * @param unionOp Union operator being optimized away
 * @param successors Successors of the union operator (may be null)
 * @param unionOutputKeys Output keys of the union, parallel to newOutputKeys
 * @param newOutputKeys Operator keys of the replacement vertex groups
 * @throws VisitorException
 */
private void replaceSuccessorInputsAndDisconnect(TezOperator unionOp,
        List<TezOperator> successors,
        List<String> unionOutputKeys,
        String[] newOutputKeys)
        throws VisitorException {
    if (successors != null) {
        String unionOpKey = unionOp.getOperatorKey().toString();
        // Successor inputs should now point to the vertex groups.
        for (TezOperator succ : successors) {
            LinkedList<TezInput> inputs = PlanHelper.getPhysicalOperators(succ.plan, TezInput.class);
            for (TezInput input : inputs) {
                for (String key : input.getTezInputs()) {
                    if (key.equals(unionOpKey)) {
                        // The union output key for this successor is the successor's own operator key
                        input.replaceInput(key,
                                newOutputKeys[unionOutputKeys.indexOf(succ.getOperatorKey().toString())]);
                    }
                }
            }
            // Scalar reads also hold the input key inside the UDF's ctor args
            List<POUserFunc> userFuncs = PlanHelper.getPhysicalOperators(succ.plan, POUserFunc.class);
            for (POUserFunc userFunc : userFuncs) {
                if (userFunc.getFunc() instanceof ReadScalarsTez) {
                    TezInput tezInput = (TezInput)userFunc.getFunc();
                    for (String inputKey : tezInput.getTezInputs()) {
                        if (inputKey.equals(unionOpKey)) {
                            tezInput.replaceInput(inputKey,
                                    newOutputKeys[unionOutputKeys.indexOf(succ.getOperatorKey().toString())]);
                            userFunc.getFuncSpec().setCtorArgs(tezInput.getTezInputs());
                        }
                    }
                }
            }
            tezPlan.disconnect(unionOp, succ);
        }
    }
}
/**
 * Copies the union vertex's map-side properties onto a predecessor that is
 * absorbing the union's plan.
 *
 * @param pred predecessor receiving the properties
 * @param unionOp union operator being optimized away
 * @throws VisitorException if pred and unionOp carry conflicting sample operators
 */
private void copyOperatorProperties(TezOperator pred, TezOperator unionOp) throws VisitorException {
    pred.UDFs.addAll(unionOp.UDFs);
    pred.scalars.addAll(unionOp.scalars);
    // Copy only map side properties. For eg: crossKeys.
    // Do not copy reduce side specific properties. For eg: useSecondaryKey, segmentBelow, sortOrder, etc
    // Also ignore parallelism settings
    if (unionOp.getCrossKeys() != null) {
        for (String crossKey : unionOp.getCrossKeys()) {
            pred.addCrossKey(crossKey);
        }
    }
    pred.copyFeatures(unionOp, Arrays.asList(OPER_FEATURE.UNION));
    // For skewed join right input
    if (unionOp.getSampleOperator() == null) {
        return;
    }
    if (pred.getSampleOperator() == null) {
        pred.setSampleOperator(unionOp.getSampleOperator());
    } else if (!pred.getSampleOperator().equals(unionOp.getSampleOperator())) {
        throw new VisitorException("Conflicting sample operators "
                + pred.getSampleOperator().toString() + " and "
                + unionOp.getSampleOperator().toString());
    }
}
/**
 * Finds the POSplit sub-plan whose leaf POValueOutputTez writes to the given
 * union operator key.
 *
 * @param plan physical plan containing the POSplit
 * @param unionOpKey operator key of the union the sub-plan feeds
 * @return the matching sub-plan
 * @throws VisitorException if no sub-plan writes to unionOpKey
 */
public static PhysicalPlan getUnionPredPlanFromSplit(PhysicalPlan plan, String unionOpKey) throws VisitorException {
    for (POSplit split : PlanHelper.getPhysicalOperators(plan, POSplit.class)) {
        for (PhysicalPlan subPlan : split.getPlans()) {
            PhysicalOperator leaf = subPlan.getLeaves().get(0);
            if (!(leaf instanceof POValueOutputTez)) {
                continue;
            }
            if (((POValueOutputTez) leaf).containsOutputKey(unionOpKey)) {
                return subPlan;
            }
        }
    }
    throw new VisitorException("Did not find the union predecessor in the split plan");
}
}
|
google/sagetv | 35,155 | java/sage/TVTuningFrequencies.java | /*
* Copyright 2015 The SageTV Authors. All Rights Reserved.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package sage;
public class TVTuningFrequencies
{
// Static table holder; private constructor prevents instantiation.
private TVTuningFrequencies(){}
// Bit flags identifying analog TV broadcast standards; each value is a single
// bit so standards can be OR-ed together into a mask. The gaps in the bit
// sequence (0x08, 0x40) presumably belonged to variants omitted here
// (e.g. PAL_G) -- TODO confirm against the original standard enumeration.
public static final int NTSC_M = 0x00000001;
public static final int NTSC_M_J = 0x00000002;
public static final int NTSC_433 = 0x00000004;
public static final int PAL_B = 0x00000010;
public static final int PAL_D = 0x00000020;
public static final int PAL_H = 0x00000080;
public static final int PAL_I = 0x00000100;
public static final int PAL_M = 0x00000200;
public static final int PAL_N = 0x00000400;
public static final int PAL_60 = 0x00000800;
public static final int SECAM_B = 0x00001000;
public static final int SECAM_D = 0x00002000;
public static final int SECAM_G = 0x00004000;
public static final int SECAM_H = 0x00008000;
public static final int SECAM_K = 0x00010000;
public static final int SECAM_K1 = 0x00020000;
public static final int SECAM_L = 0x00040000;
public static final int SECAM_L1 = 0x00080000;
// Country display names. This array is parallel to ISO_COUNTRY_CODES and
// COUNTRY_CODES below: index i in each array refers to the same country,
// so entries must never be reordered independently.
public static final String[] COUNTRIES = {
    "United States of America",
    "Anguilla",
    "Antigua",
    "Bahamas",
    "Barbados",
    "Bermuda",
    "British Virgin Islands",
    "Canada",
    "Cayman Islands",
    "Dominica",
    "Dominican Republic",
    "Grenada",
    "Jamaica",
    "Montserrat",
    "Nevis",
    "St. Kitts",
    "St. Vincent and the Grenadines",
    "Trinidad and Tobago",
    "Turks and Caicos Islands",
    "Barbuda",
    "Puerto Rico",
    "Saint Lucia",
    "United States Virgin Islands",
    //    "Canada", // duplicate
    "Russia",
    "Kazakhstan",
    "Kyrgyzstan",
    "Tajikistan",
    "Turkmenistan",
    "Uzbekistan",
    "Egypt",
    "South Africa",
    "Greece",
    "Netherlands",
    "Belgium",
    "France",
    "Spain",
    "Hungary",
    "Italy",
    "Vatican City",
    "Romania",
    "Switzerland",
    "Liechtenstein",
    "Austria",
    "United Kingdom",
    "Denmark",
    "Sweden",
    "Norway",
    "Poland",
    "Germany",
    "Peru",
    "Mexico",
    "Cuba",
    "Guantanamo Bay",
    "Argentina",
    "Brazil",
    "Chile",
    "Colombia",
    "Bolivarian Republic of Venezuela",
    "Malaysia",
    "Australia",
    "Cocos-Keeling Islands",
    "Indonesia",
    "Philippines",
    "New Zealand",
    "Singapore",
    "Thailand",
    "Japan",
    "Korea (South)",
    "Vietnam",
    "China",
    "Turkey",
    "India",
    "Pakistan",
    "Afghanistan",
    "Sri Lanka",
    "Myanmar",
    "Iran",
    "Morocco",
    "Algeria",
    "Tunisia",
    "Libya",
    "Gambia",
    "Senegal Republic",
    "Mauritania",
    "Mali",
    "Guinea",
    "Cote D'Ivoire",
    "Burkina Faso",
    "Niger",
    "Togo",
    "Benin",
    "Mauritius",
    "Liberia",
    "Sierra Leone",
    "Ghana",
    "Nigeria",
    "Chad",
    "Central African Republic",
    "Cameroon",
    "Cape Verde Islands",
    "Sao Tome and Principe",
    "Equatorial Guinea",
    "Gabon",
    "Congo",
    "Congo(DRC)",
    "Angola",
    "Guinea-Bissau",
    "Diego Garcia",
    "Ascension Island",
    "Seychelle Islands",
    "Sudan",
    "Rwanda",
    "Ethiopia",
    "Somalia",
    "Djibouti",
    "Kenya",
    "Tanzania",
    "Uganda",
    "Burundi",
    "Mozambique",
    "Zambia",
    "Madagascar",
    "Reunion Island",
    "Zimbabwe",
    "Namibia",
    "Malawi",
    "Lesotho",
    "Botswana",
    "Swaziland",
    "Mayotte Island",
    "Comoros",
    "St. Helena",
    "Eritrea",
    "Aruba",
    "Faroe Islands",
    "Greenland",
    "Gibraltar",
    "Portugal",
    "Luxembourg",
    "Ireland",
    "Iceland",
    "Albania",
    "Malta",
    "Cyprus",
    "Finland",
    "Bulgaria",
    "Lithuania",
    "Latvia",
    "Estonia",
    "Moldova",
    "Armenia",
    "Belarus",
    "Andorra",
    "Monaco",
    "San Marino",
    "Ukraine",
    "Serbia and Montenegro",
    "Croatia",
    "Slovenia",
    "Bosnia and Herzegovina",
    "F.Y.R.O.M. (Former Yugoslav Republic of Macedonia)",
    "Czech Republic",
    "Slovak Republic",
    "Falkland Islands (Islas Malvinas)",
    "Belize",
    "Guatemala",
    "El Salvador",
    "Honduras",
    "Nicaragua",
    "Costa Rica",
    "Panama",
    "St. Pierre and Miquelon",
    "Haiti",
    "Guadeloupe",
    "French Antilles",
    "Bolivia",
    "Guyana",
    "Ecuador",
    "French Guiana",
    "Paraguay",
    "Martinique",
    "Suriname",
    "Uruguay",
    "Netherlands Antilles",
    "Saipan Island",
    "Rota Island",
    "Tinian Island",
    "Guam",
    "Christmas Island",
    "Australian Antarctic Territory",
    "Norfolk Island",
    "Brunei",
    "Nauru",
    "Papua New Guinea",
    "Tonga",
    "Solomon Islands",
    "Vanuatu",
    "Fiji Islands",
    "Palau",
    "Wallis and Futuna Islands",
    "Cook Islands",
    "Niue",
    "Territory of American Samoa",
    "Samoa",
    "Kiribati Republic",
    "New Caledonia",
    "Tuvalu",
    "French Polynesia",
    "Tokelau",
    "Micronesia",
    "Marshall Islands",
    "Korea (North)",
    "Hong Kong SAR",
    "Macao SAR",
    "Cambodia",
    "Laos",
    "INMARSAT (Atlantic-East)",
    "INMARSAT (Pacific)",
    "INMARSAT (Indian)",
    "INMARSAT (Atlantic-West)",
    "Bangladesh",
    "Taiwan",
    "Maldives",
    "Lebanon",
    "Jordan",
    "Syria",
    "Iraq",
    "Kuwait",
    "Saudi Arabia",
    "Yemen",
    "Oman",
    "United Arab Emirates",
    "Israel",
    "Bahrain",
    "Qatar",
    "Bhutan",
    "Mongolia",
    "Nepal",
    "Azerbaijan",
    "Georgia"
};
// ISO 3166-1 alpha-2 codes, parallel to COUNTRIES above (same index = same
// country). Empty strings mark territories with no code in this table.
// NOTE(review): some entries are dated (e.g. "CS" for Serbia and Montenegro
// is a withdrawn code) -- verify before reuse for new lookups.
public static final String[] ISO_COUNTRY_CODES = {
    "US", //"United States of America",
    "AI", //"Anguilla",
    "AG", //"Antigua",
    "BS", //"Bahamas",
    "BB", //"Barbados",
    "BM", //"Bermuda",
    "VG", //"British Virgin Islands",
    "CA", //"Canada",
    "KY", //"Cayman Islands",
    "DM", //"Dominica",
    "DO", //"Dominican Republic",
    "GD", //"Grenada",
    "JM", //"Jamaica",
    "MS", //"Montserrat",
    "KN", //"Nevis",
    "KN", //"St. Kitts",
    "VC", //"St. Vincent and the Grenadines",
    "TT", //"Trinidad and Tobago",
    "TC", //"Turks and Caicos Islands",
    "AG", //"Barbuda",
    "PR", //"Puerto Rico",
    "LC", //"Saint Lucia",
    "UM", //"United States Virgin Islands",
    //    "Canada", // duplicate
    "RU", //"Russia",
    "KZ", //"Kazakhstan",
    "KG", //"Kyrgyzstan",
    "TJ", //"Tajikistan",
    "TM", //"Turkmenistan",
    "UZ", //"Uzbekistan",
    "EG", //"Egypt",
    "ZA", //"South Africa",
    "GR", //"Greece",
    "NL", //"Netherlands",
    "BE", //"Belgium",
    "FR", //"France",
    "ES", //"Spain",
    "HU", //"Hungary",
    "IT", //"Italy",
    "VA", //"Vatican City",
    "RO", //"Romania",
    "CH", //"Switzerland",
    "LI", //"Liechtenstein",
    "AT", //"Austria",
    "GB", //"United Kingdom",
    "DK", //"Denmark",
    "SE", //"Sweden",
    "NO", //"Norway",
    "PL", //"Poland",
    "DE", //"Germany",
    "PE", //"Peru",
    "MX", //"Mexico",
    "CU", //"Cuba",
    "CU", //"Guantanamo Bay",
    "AR", //"Argentina",
    "BR", //"Brazil",
    "CL", //"Chile",
    "CO", //"Colombia",
    "BO", //"Bolivarian Republic of Venezuela",
    "MY", //"Malaysia",
    "AU", //"Australia",
    "CC", //"Cocos-Keeling Islands",
    "ID", //"Indonesia",
    "PH", //"Philippines",
    "NZ", //"New Zealand",
    "SG", //"Singapore",
    "TH", //"Thailand",
    "JP", //"Japan",
    "KR", //"Korea (South)",
    "VN", //"Vietnam",
    "CN", //"China",
    "TR", //"Turkey",
    "IN", //"India",
    "PK", //"Pakistan",
    "AF", //"Afghanistan",
    "LK", //"Sri Lanka",
    "MM", //"Myanmar",
    "IR", //"Iran",
    "MA", //"Morocco",
    "DZ", //"Algeria",
    "TN", //"Tunisia",
    "LY", //"Libya",
    "GM", //"Gambia",
    "SN", //"Senegal Republic",
    "MR", //"Mauritania",
    "ML", //"Mali",
    "GN", //"Guinea",
    "CI", //"Cote D'Ivoire",
    "BF", //"Burkina Faso",
    "NE", //"Niger",
    "TG", //"Togo",
    "BJ", //"Benin",
    "MU", //"Mauritius",
    "LR", //"Liberia",
    "SL", //"Sierra Leone",
    "GH", //"Ghana",
    "NG", //"Nigeria",
    "TD", //"Chad",
    "CF", //"Central African Republic",
    "CM", //"Cameroon",
    "CV", //"Cape Verde Islands",
    "ST", //"Sao Tome and Principe",
    "GQ", //"Equatorial Guinea",
    "GA", //"Gabon",
    "CG", //"Congo",
    "CD", //"Congo(DRC)",
    "AO", //"Angola",
    "GW", //"Guinea-Bissau",
    "", //"Diego Garcia",
    "", //"Ascension Island",
    "SC", //"Seychelle Islands",
    "SD", //"Sudan",
    "RW", //"Rwanda",
    "ET", //"Ethiopia",
    "SO", //"Somalia",
    "DJ", //"Djibouti",
    "KE", //"Kenya",
    "TZ", //"Tanzania",
    "UG", //"Uganda",
    "BI", //"Burundi",
    "MZ", //"Mozambique",
    "ZM", //"Zambia",
    "MG", //"Madagascar",
    "RE", //"Reunion Island",
    "ZW", //"Zimbabwe",
    "NA", //"Namibia",
    "MW", //"Malawi",
    "LS", //"Lesotho",
    "BW", //"Botswana",
    "SZ", //"Swaziland",
    "YT", //"Mayotte Island",
    "KM", //"Comoros",
    "SH", //"St. Helena",
    "ER", //"Eritrea",
    "AW", //"Aruba",
    "FO", //"Faroe Islands",
    "GL", //"Greenland",
    "GI", //"Gibraltar",
    "PT", //"Portugal",
    "LU", //"Luxembourg",
    "IE", //"Ireland",
    "IS", //"Iceland",
    "AL", //"Albania",
    "MT", //"Malta",
    "CY", //"Cyprus",
    "FI", //"Finland",
    "BG", //"Bulgaria",
    "LT", //"Lithuania",
    "LV", //"Latvia",
    "EE", //"Estonia",
    "MD", //"Moldova",
    "AM", //"Armenia",
    "BY", //"Belarus",
    "AD", //"Andorra",
    "MC", //"Monaco",
    "SM", //"San Marino",
    "UA", //"Ukraine",
    "CS", //"Serbia and Montenegro",
    "HR", //"Croatia",
    "SI", //"Slovenia",
    "BA", //"Bosnia and Herzegovina",
    "MK", //"F.Y.R.O.M. (Former Yugoslav Republic of Macedonia)",
    "CZ", //"Czech Republic",
    "SK", //"Slovak Republic",
    "FK", //"Falkland Islands (Islas Malvinas)",
    "BZ", //"Belize",
    "GT", //"Guatemala",
    "SV", //"El Salvador",
    "HN", //"Honduras",
    "NI", //"Nicaragua",
    "CR", //"Costa Rica",
    "PA", //"Panama",
    "PM", //"St. Pierre and Miquelon",
    "HT", //"Haiti",
    "GP", //"Guadeloupe",
    "FR", //"French Antilles",
    "BO", //"Bolivia",
    "GY", //"Guyana",
    "EC", //"Ecuador",
    "GF", //"French Guiana",
    "PY", //"Paraguay",
    "MQ", //"Martinique",
    "SR", //"Suriname",
    "UY", //"Uruguay",
    "AN", //"Netherlands Antilles",
    "", //"Saipan Island",
    "", //"Rota Island",
    "", //"Tinian Island",
    "GU", //"Guam",
    "CK", //"Christmas Island",
    "AQ", //"Australian Antarctic Territory",
    "NF", //"Norfolk Island",
    "BN", //"Brunei",
    "NR", //"Nauru",
    "PG", //"Papua New Guinea",
    "TO", //"Tonga",
    "SB", //"Solomon Islands",
    "VU", //"Vanuatu",
    "FJ", //"Fiji Islands",
    "PW", //"Palau",
    "WF", //"Wallis and Futuna Islands",
    "CK", //"Cook Islands",
    "NU", //"Niue",
    "AS", //"Territory of American Samoa",
    "WS", //"Samoa",
    "KI", //"Kiribati Republic",
    "NC", //"New Caledonia",
    "TV", //"Tuvalu",
    "PF", //"French Polynesia",
    "TK", //"Tokelau",
    "FM", //"Micronesia",
    "MH", //"Marshall Islands",
    "KP", //"Korea (North)",
    "HK", //"Hong Kong SAR",
    "MO", //"Macao SAR",
    "KH", //"Cambodia",
    "LA", //"Laos",
    "", //"INMARSAT (Atlantic-East)",
    "", //"INMARSAT (Pacific)",
    "", //"INMARSAT (Indian)",
    "", //"INMARSAT (Atlantic-West)",
    "BD", //"Bangladesh",
    "TW", //"Taiwan",
    "MV", //"Maldives",
    "LB", //"Lebanon",
    "JO", //"Jordan",
    "SY", //"Syria",
    "IQ", //"Iraq",
    "KW", //"Kuwait",
    "SA", //"Saudi Arabia",
    "YE", //"Yemen",
    "OM", //"Oman",
    "AE", //"United Arab Emirates",
    "IL", //"Israel",
    "BH", //"Bahrain",
    "QA", //"Qatar",
    "BT", //"Bhutan",
    "MN", //"Mongolia",
    "NP", //"Nepal",
    "AZ", //"Azerbaijan",
    "GE", //"Georgia"
};
// International telephone dialing codes, parallel to COUNTRIES /
// ISO_COUNTRY_CODES: COUNTRY_CODES[i] is the dialing code for COUNTRIES[i].
// INVARIANTS relied on by getCCIndex():
//  - the array is sorted ascending so java.util.Arrays.binarySearch() works;
//  - when a code appears more than once (e.g. 1, 7, 39, 53, 269, 590, 670,
//    672), binarySearch may land on ANY of the duplicates, so the parallel
//    tables (CABLE_AIR_FREQ_CODES, VIDEO_FORMAT_CODES) should carry identical
//    rows for each duplicate.
// NOTE(review): the two entries for code 61 appear to have DIFFERENT parallel
// rows (Australian vs. US broadcast tables) — confirm which is intended,
// since the lookup result for 61 is then search-order dependent.
public static final int[] COUNTRY_CODES = {
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  1,
  // 2,
  7,
  7,
  7,
  7,
  7,
  7,
  20,
  27,
  30,
  31,
  32,
  33,
  34,
  36,
  39,
  39,
  40,
  41,
  41,
  43,
  44,
  45,
  46,
  47,
  48,
  49,
  51,
  52,
  53,
  53,
  54,
  55,
  56,
  57,
  58,
  60,
  61,
  61,
  62,
  63,
  64,
  65,
  66,
  81,
  82,
  84,
  86,
  90,
  91,
  92,
  93,
  94,
  95,
  98,
  212,
  213,
  216,
  218,
  220,
  221,
  222,
  223,
  224,
  225,
  226,
  227,
  228,
  229,
  230,
  231,
  232,
  233,
  234,
  235,
  236,
  237,
  238,
  239,
  240,
  241,
  242,
  243,
  244,
  245,
  246,
  247,
  248,
  249,
  250,
  251,
  252,
  253,
  254,
  255,
  256,
  257,
  258,
  260,
  261,
  262,
  263,
  264,
  265,
  266,
  267,
  268,
  269,
  269,
  290,
  291,
  297,
  298,
  299,
  350,
  351,
  352,
  353,
  354,
  355,
  356,
  357,
  358,
  359,
  370,
  371,
  372,
  373,
  374,
  375,
  376,
  377,
  378,
  380,
  381,
  385,
  386,
  387,
  389,
  420,
  421,
  500,
  501,
  502,
  503,
  504,
  505,
  506,
  507,
  508,
  509,
  590,
  590,
  591,
  592,
  593,
  594,
  595,
  596,
  597,
  598,
  599,
  670,
  670,
  670,
  671,
  672,
  672,
  672,
  673,
  674,
  675,
  676,
  677,
  678,
  679,
  680,
  681,
  682,
  683,
  684,
  685,
  686,
  687,
  688,
  689,
  690,
  691,
  692,
  850,
  852,
  853,
  855,
  856,
  871,
  872,
  873,
  874,
  880,
  886,
  960,
  961,
  962,
  963,
  964,
  965,
  966,
  967,
  968,
  971,
  972,
  973,
  974,
  975,
  976,
  977,
  994,
  995,
};
// Frequency-table identifiers.  Each F_* constant names one tuning table and
// doubles as the row index into CHANNEL_MIN_MAX and as the values stored in
// CABLE_AIR_FREQ_CODES.  Naming: region prefix (FIX = fixed/default,
// OZ = Australia, FOT = French overseas territories, EEU/WEU = Eastern/
// Western Europe, UNI = universal) + BROAD (over-the-air) or CABLE.
public static final int F_FIX_BROAD = 0;
public static final int F_USA_BROAD = 1;
public static final int F_USA_CABLE = 2;
public static final int F_OZ__BROAD = 3;
public static final int F_CHN_BROAD = 4;
public static final int F_CHN_CABLE = 5;
public static final int F_CZE_BROAD = 6;
public static final int F_EEU_BROAD = 7;
public static final int F_FRA_BROAD = 8;
public static final int F_FOT_BROAD = 9;
public static final int F_IRE_BROAD = 10;
public static final int F_ITA_BROAD = 11;
public static final int F_JAP_BROAD = 12;
public static final int F_JAP_CABLE = 13;
public static final int F_NZ__BROAD = 14;
public static final int F_UK__BROAD = 15;
public static final int F_UK__CABLE = 16;
public static final int F_WEU_BROAD = 17;
public static final int F_WEU_CABLE = 18;
public static final int F_UNI_CABLE = 19;
// Per frequency table (row index = an F_* constant): the lowest and highest
// valid channel numbers, as { min, max }.
public static final int[][] CHANNEL_MIN_MAX = {
  { 2, 69 }, // FIX_BROAD
  { 2, 69 }, // USA_BROAD
  { 1, 158 }, // USA_CABLE
  { 1, 53 }, // OZ__BROAD
  { 1, 68 }, // CHN_BROAD
  { 1, 110 }, // CHN_CABLE
  { 1, 69 }, // CZE_BROAD
  { 1, 69 }, // EEU_BROAD
  { 2, 69 }, // FRA_BROAD
  { 4, 9 }, // FOT_BROAD
  { 1, 10 }, // IRE_BROAD
  { 1, 10 }, // ITA_BROAD
  { 1, 62 }, // JAP_BROAD
  { 1, 113 }, // JAP_CABLE
  { 1, 11 }, // NZ__BROAD
  { 1, 69 }, // UK__BROAD
  { 1, 107 }, // UK__CABLE
  { 1, 69 }, // WEU_BROAD
  { 1, 107 }, // WEU_CABLE
  { 1, 368 } // UNI_CABLE
};
// Frequency tables per country, parallel to COUNTRIES/COUNTRY_CODES but with
// TWO entries per country: [2*i] is the cable table and [2*i + 1] the
// over-the-air (broadcast) table for COUNTRIES[i].  Values are F_* ids, i.e.
// row indices into CHANNEL_MIN_MAX.  Because getCCIndex() may resolve a
// duplicated dialing code to any of its duplicate indices, duplicate codes
// should map to identical pairs here (see NOTE on COUNTRY_CODES).
public static final int[] CABLE_AIR_FREQ_CODES = {
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  // F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_UK__BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_WEU_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_FRA_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_ITA_BROAD,
  F_UNI_CABLE, F_ITA_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UK__CABLE, F_UK__BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_WEU_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_WEU_CABLE, F_WEU_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_OZ__BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_NZ__BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_JAP_CABLE, F_JAP_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_CHN_CABLE, F_CHN_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_FIX_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_FIX_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_FIX_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_FIX_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_FIX_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_FIX_BROAD,
  F_UNI_CABLE, F_FIX_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_FIX_BROAD,
  F_UNI_CABLE, F_FIX_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_IRE_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_ITA_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_ITA_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_CZE_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_UK__BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_FOT_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_UK__BROAD,
  F_UNI_CABLE, F_UK__BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_USA_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_WEU_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_USA_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
  F_UNI_CABLE, F_EEU_BROAD,
};
// Analog video standard per country, parallel to COUNTRIES/COUNTRY_CODES:
// VIDEO_FORMAT_CODES[i] is the NTSC/PAL/SECAM variant used in COUNTRIES[i].
// The NTSC_M/PAL_*/SECAM_* constants are declared elsewhere in this class.
// As with CABLE_AIR_FREQ_CODES, duplicated dialing codes should map to the
// same format here (see NOTE on COUNTRY_CODES).
public static final int[] VIDEO_FORMAT_CODES = {
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  //NTSC_M,
  SECAM_D,
  SECAM_D,
  SECAM_D,
  SECAM_D,
  SECAM_D,
  SECAM_D,
  SECAM_B,
  PAL_I,
  SECAM_B,
  PAL_B,
  PAL_B,
  SECAM_L,
  PAL_B,
  PAL_D,
  PAL_B,
  PAL_B,
  PAL_D,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_I,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  PAL_N,
  PAL_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  PAL_B,
  PAL_B,
  NTSC_M,
  PAL_B,
  NTSC_M,
  PAL_B,
  PAL_B,
  PAL_B,
  NTSC_M_J,
  NTSC_M,
  NTSC_M,
  PAL_D,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  NTSC_M,
  SECAM_B,
  SECAM_B,
  PAL_B,
  SECAM_B,
  SECAM_B,
  SECAM_K,
  SECAM_K,
  SECAM_B,
  SECAM_K,
  SECAM_K,
  SECAM_K,
  SECAM_K,
  SECAM_K,
  SECAM_K,
  SECAM_K,
  SECAM_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  NTSC_M,
  PAL_B,
  SECAM_B,
  SECAM_K,
  SECAM_D,
  SECAM_K,
  PAL_I,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  SECAM_K,
  PAL_B,
  PAL_B,
  PAL_B,
  SECAM_K,
  PAL_B,
  PAL_B,
  SECAM_K,
  SECAM_K,
  PAL_B,
  PAL_I,
  NTSC_M,
  PAL_I,
  SECAM_K,
  PAL_B,
  SECAM_K,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  PAL_B,
  NTSC_M,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_I,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  SECAM_D,
  PAL_B,
  SECAM_D,
  PAL_B,
  SECAM_D,
  SECAM_D,
  SECAM_D,
  NTSC_M,
  SECAM_G,
  PAL_B,
  SECAM_D,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_D,
  PAL_B,
  PAL_I,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  SECAM_K,
  NTSC_M,
  SECAM_K,
  NTSC_M,
  PAL_N,
  SECAM_K,
  NTSC_M,
  SECAM_K,
  PAL_N,
  SECAM_K,
  NTSC_M,
  PAL_N,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  PAL_B,
  PAL_B,
  NTSC_M,
  PAL_B,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  SECAM_K,
  PAL_B,
  NTSC_M,
  NTSC_M,
  PAL_B,
  PAL_B,
  SECAM_K,
  NTSC_M,
  SECAM_K,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  SECAM_D,
  PAL_I,
  PAL_I,
  PAL_B,
  PAL_B,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  NTSC_M,
  PAL_B,
  NTSC_M,
  PAL_B,
  SECAM_B,
  PAL_B,
  SECAM_B,
  SECAM_B,
  PAL_B,
  SECAM_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  PAL_B,
  NTSC_M,
  SECAM_D,
  PAL_B,
  SECAM_D,
  SECAM_D
};
// Resolves a country name (case-insensitive, must match an entry in
// COUNTRIES exactly) to its international dialing code; 0 if unknown.
public static final int getCountryCode(String country)
{
  for (int idx = 0; idx < COUNTRIES.length; idx++)
  {
    if (COUNTRIES[idx].equalsIgnoreCase(country))
      return COUNTRY_CODES[idx];
  }
  return 0;
}
// Resolves a country name (case-insensitive) to its two-letter ISO 3166
// code; returns the empty string when the name is unknown (some entries in
// ISO_COUNTRY_CODES are themselves empty for unassigned territories).
public static final String getISOCountryCode(String country)
{
  for (int idx = 0; idx < COUNTRIES.length; idx++)
  {
    if (COUNTRIES[idx].equalsIgnoreCase(country))
      return ISO_COUNTRY_CODES[idx];
  }
  return "";
}
// Maps a telephone dialing code to an index into the parallel country tables
// via binary search over the sorted COUNTRY_CODES array.
// - For duplicated codes the returned index is whichever duplicate the
//   search probes first; the parallel tables are expected to be identical
//   for duplicates.
// NOTE(review): for an unknown code, binarySearch returns a negative
// insertion marker, which the clamp below forces to index 0 — i.e. every
// unknown code silently falls back to the first NANP/USA entry rather than
// a nearest match.  Confirm this fallback is intentional.
private static int getCCIndex(int countryCode)
{
  int rv = java.util.Arrays.binarySearch(COUNTRY_CODES, countryCode);
  rv = Math.max(0, Math.min(COUNTRY_CODES.length - 1, rv));
  return rv;
}
// Highest valid cable channel number for the given dialing code's country.
public static final int getCableChannelMax(int countryCode)
{
  int freqTable = CABLE_AIR_FREQ_CODES[2 * getCCIndex(countryCode)];
  return CHANNEL_MIN_MAX[freqTable][1];
}
// Lowest valid cable channel number for the given dialing code's country.
public static final int getCableChannelMin(int countryCode)
{
  int freqTable = CABLE_AIR_FREQ_CODES[2 * getCCIndex(countryCode)];
  return CHANNEL_MIN_MAX[freqTable][0];
}
// Highest valid over-the-air channel number for the given dialing code's
// country (broadcast tables sit at the odd offsets of CABLE_AIR_FREQ_CODES).
public static final int getBroadcastChannelMax(int countryCode)
{
  int freqTable = CABLE_AIR_FREQ_CODES[2 * getCCIndex(countryCode) + 1];
  return CHANNEL_MIN_MAX[freqTable][1];
}
// Lowest valid over-the-air channel number for the given dialing code's
// country.
public static final int getBroadcastChannelMin(int countryCode)
{
  int freqTable = CABLE_AIR_FREQ_CODES[2 * getCCIndex(countryCode) + 1];
  return CHANNEL_MIN_MAX[freqTable][0];
}
// Analog video standard (NTSC/PAL/SECAM constant) for the given dialing
// code's country.
public static final int getVideoFormatCode(int countryCode)
{
  return VIDEO_FORMAT_CODES[getCCIndex(countryCode)];
}
// Region-name tables parsed lazily from the Predefined{DVBT,DVBC,DVBS}.frq
// files by loadTuningFreqFile().  DVBT_REGIONS/DVBC_REGIONS are parallel to
// COUNTRIES (null entry = no regions for that country); DVB-S regions are
// global rather than per-country.
private static String[][] DVBT_REGIONS = new String[COUNTRIES.length][];
private static String[][] DVBC_REGIONS = new String[COUNTRIES.length][];
private static String[] DVBS_REGIONS = Pooler.EMPTY_STRING_ARRAY;
// volatile is required for correctness: loadTuningFreqFile() reads this flag
// WITHOUT holding tuningFileLock (double-checked locking).  Without volatile
// the Java memory model permits a thread to observe the flag as true before
// it observes the region arrays' contents.
private static volatile boolean loadedTuningFreqFiles = false;
private static final Object tuningFileLock = new Object();
// Parses a Predefined*.frq tuning file and returns a Map from country name
// (an element of COUNTRIES) to a java.util.Vector of region-name Strings.
// File format: '#'-prefixed lines are comments; data lines contain one or
// more double-quoted names.  A quoted name is either an exact country name,
// or a country name followed by one separator character and a region name.
// (Raw collection types are kept to match this file's pre-generics style.)
private static java.util.Map buildCountryRegionMap(String sourceFile)
{
  // Sort a CLONE for binary searching; COUNTRIES itself must keep its
  // original order because the other tables are parallel to it.
  String[] sortedCountries = (String[])COUNTRIES.clone();
  java.util.Arrays.sort(sortedCountries);
  java.io.BufferedReader br = null;
  java.util.Map countryToRegionsMap = new java.util.HashMap();
  try
  {
    br = new java.io.BufferedReader(new java.io.FileReader(sourceFile));
    while (true)
    {
      String currLine = br.readLine();
      if (currLine == null)
        break;
      currLine = currLine.trim();
      // Skip empty lines and comments
      if (currLine.length() == 0 || currLine.charAt(0) == '#')
        continue;
      while (currLine.charAt(0) == '"')
      {
        // Line starts with a quote, that's what we want. Find the closing quote. There may be multiple on one line separated by commas
        int nextQuote = currLine.indexOf('"', 1);
        if (nextQuote == -1)
          break;
        String currName = currLine.substring(1, nextQuote).trim();
        //if (Sage.DBG) System.out.println("Found country/region name of: " + currName);
        // Find it in our sorted list
        int nameIndex = java.util.Arrays.binarySearch(sortedCountries, currName);
        if (nameIndex >= 0)
        {
          // Exact country-name match: no region component, nothing to record.
          //System.out.println(currName + " is a country");
        }
        else
        {
          // Not an exact match: treat currName as "<country><sep><region>".
          // Convert the negative binarySearch result back to the insertion
          // point, then walk backwards to the country name that currName
          // would sort after and is prefixed by.
          nameIndex = -1 * (nameIndex + 1);
          do
          {
            nameIndex--; // to align with the country name we'd be inserted after
          }while (nameIndex >= 0 && !currName.startsWith(sortedCountries[nameIndex]));
          if (nameIndex >= 0)
          {
            if (currName.startsWith(sortedCountries[nameIndex]))
            {
              String currCountry = sortedCountries[nameIndex];
              // +1 skips the single separator character between the country
              // name and the region name (e.g. a '-' or space).
              String currRegion = currName.substring(sortedCountries[nameIndex].length() + 1);
              //if (Sage.DBG) System.out.println(currRegion + " is a region in country " + currCountry);
              java.util.Vector regionList = (java.util.Vector) countryToRegionsMap.get(currCountry);
              if (regionList == null)
              {
                regionList = new java.util.Vector();
                countryToRegionsMap.put(currCountry, regionList);
              }
              regionList.add(currRegion);
            }
            // else
            //   if (Sage.DBG) System.out.println("ERROR in country name of:" + currName + " it did not match:" + sortedCountries[nameIndex]);
          }
          // else
          //   if (Sage.DBG) System.out.println("ERROR in country name of:" + currName);
        }
        // Find the next quote if it's there
        nextQuote = currLine.indexOf('"', nextQuote + 1);
        if (nextQuote != -1)
          currLine = currLine.substring(nextQuote);
        else
          break;
      }
    }
  }
  catch (java.io.IOException e)
  {
    // Best-effort: a missing/unreadable file yields an empty (or partial) map.
    System.out.println("ERROR: Could not load " + sourceFile + " file!!!!!");
  }
  finally
  {
    try
    {
      if (br != null)
        br.close();
      br = null;
    }
    catch (Exception e){}
  }
  return countryToRegionsMap;
}
// Parses a Predefined*.frq file and returns every double-quoted name found
// on non-comment lines, in file order.  '#'-prefixed lines are comments.
// Returns an empty array (never null) when the file is missing or empty.
private static String[] buildRegionList(String sourceFile)
{
  java.util.Vector regionsList = new java.util.Vector();
  java.io.BufferedReader br = null;
  try
  {
    br = new java.io.BufferedReader(new java.io.FileReader(sourceFile));
    for (String currLine = br.readLine(); currLine != null; currLine = br.readLine())
    {
      currLine = currLine.trim();
      // Comments and blank lines carry no data.
      if (currLine.length() == 0 || currLine.charAt(0) == '#')
        continue;
      // Consume every "name" on the line; a line may hold several,
      // separated by commas.
      while (currLine.charAt(0) == '"')
      {
        int closeQuote = currLine.indexOf('"', 1);
        if (closeQuote == -1)
          break;
        regionsList.add(currLine.substring(1, closeQuote).trim());
        // Advance to the next opening quote, if any remains on the line.
        int openQuote = currLine.indexOf('"', closeQuote + 1);
        if (openQuote == -1)
          break;
        currLine = currLine.substring(openQuote);
      }
    }
  }
  catch (java.io.IOException e)
  {
    System.out.println("ERROR: Could not load " + sourceFile + " file!!!!!");
  }
  finally
  {
    try
    {
      if (br != null)
        br.close();
      br = null;
    }
    catch (Exception e){}
  }
  return (String[]) regionsList.toArray(Pooler.EMPTY_STRING_ARRAY);
}
// Lazily populates DVBT_REGIONS / DVBC_REGIONS / DVBS_REGIONS from the
// Predefined{DVBT,DVBC,DVBS}.frq files, exactly once.  Uses double-checked
// locking so the common already-loaded path avoids taking the lock.
// NOTE(review): the unsynchronized fast-path read is only safe under the
// Java memory model if loadedTuningFreqFiles is declared volatile — confirm
// the field declaration; otherwise a thread may see the flag set before the
// region arrays are fully visible.
private static void loadTuningFreqFile()
{
  if (loadedTuningFreqFiles)
    return;
  synchronized (tuningFileLock)
  {
    // Re-check under the lock: another thread may have finished loading
    // between the unsynchronized check above and lock acquisition.
    if (loadedTuningFreqFiles)
      return;
    java.util.Map countryToRegionsMap = buildCountryRegionMap("PredefinedDVBT.frq");
    for (int i = 0; i < COUNTRIES.length; i++)
    {
      java.util.Vector currList = (java.util.Vector) countryToRegionsMap.get(COUNTRIES[i]);
      if (currList != null)
      {
        DVBT_REGIONS[i] = (String[]) currList.toArray(Pooler.EMPTY_STRING_ARRAY);
      }
    }
    countryToRegionsMap = buildCountryRegionMap("PredefinedDVBC.frq");
    for (int i = 0; i < COUNTRIES.length; i++)
    {
      java.util.Vector currList = (java.util.Vector) countryToRegionsMap.get(COUNTRIES[i]);
      if (currList != null)
      {
        DVBC_REGIONS[i] = (String[]) currList.toArray(Pooler.EMPTY_STRING_ARRAY);
      }
    }
    // DVB-S regions are a flat global list, not keyed by country.
    DVBS_REGIONS = buildRegionList("PredefinedDVBS.frq");
    // Set the flag LAST so readers never see it true with unloaded tables.
    loadedTuningFreqFiles = true;
  }
}
// True when the country for this dialing code has at least one DVB-T region
// defined in PredefinedDVBT.frq.
public static final boolean doesCountryHaveDVBTRegions(int countryCode)
{
  loadTuningFreqFile();
  String[] regions = DVBT_REGIONS[getCCIndex(countryCode)];
  return regions != null && regions.length > 0;
}
// DVB-T region names for this dialing code's country; a defensive copy is
// returned (empty, never null, when the country has no regions).
public static final String[] getDVBTRegionsForCountry(int countryCode)
{
  loadTuningFreqFile();
  String[] regions = DVBT_REGIONS[getCCIndex(countryCode)];
  return (regions == null) ? Pooler.EMPTY_STRING_ARRAY : (String[]) regions.clone();
}
// True when the country for this dialing code has at least one DVB-C region
// defined in PredefinedDVBC.frq.
public static final boolean doesCountryHaveDVBCRegions(int countryCode)
{
  loadTuningFreqFile();
  String[] regions = DVBC_REGIONS[getCCIndex(countryCode)];
  return regions != null && regions.length > 0;
}
// DVB-C region names for this dialing code's country; a defensive copy is
// returned (empty, never null, when the country has no regions).
public static final String[] getDVBCRegionsForCountry(int countryCode)
{
  loadTuningFreqFile();
  String[] regions = DVBC_REGIONS[getCCIndex(countryCode)];
  return (regions == null) ? Pooler.EMPTY_STRING_ARRAY : (String[]) regions.clone();
}
// DVB-S is not country-based
// Satellite footprints span countries, so every country "has" DVB-S regions.
// The countryCode parameter exists only for API symmetry with the DVB-T and
// DVB-C variants and is intentionally ignored.
public static final boolean doesCountryHaveDVBSRegions(int countryCode)
{
  return true;
}
// DVB-S region names (global — the countryCode is ignored, kept for API
// symmetry).  Returns a defensive copy of the loaded list.
public static final String[] getDVBSRegionsForCountry(int countryCode)
{
  loadTuningFreqFile();
  String[] allRegions = DVBS_REGIONS;
  return (String[]) allRegions.clone();
}
}
|
googleapis/google-cloud-java | 36,973 | java-bigquerymigration/proto-google-cloud-bigquerymigration-v2alpha/src/main/java/com/google/cloud/bigquery/migration/v2alpha/SqlTranslationError.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/bigquery/migration/v2alpha/translation_service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.bigquery.migration.v2alpha;
/**
*
*
* <pre>
* The detailed error object if the SQL translation job fails.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.migration.v2alpha.SqlTranslationError}
*/
public final class SqlTranslationError extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.bigquery.migration.v2alpha.SqlTranslationError)
SqlTranslationErrorOrBuilder {
// Generated by protoc (see file header) — limit hand edits to comments and
// regenerate from translation_service.proto for any real change.
private static final long serialVersionUID = 0L;

// Use SqlTranslationError.newBuilder() to construct.
private SqlTranslationError(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// No-arg constructor for the default instance; the enum field defaults to 0
// (SQL_TRANSLATION_ERROR_TYPE_UNSPECIFIED).
private SqlTranslationError() {
  errorType_ = 0;
}

// Invoked reflectively by the protobuf runtime; not for direct use.
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new SqlTranslationError();
}
// Message descriptor from translation_service.proto.
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.bigquery.migration.v2alpha.TranslationServiceProto
      .internal_static_google_cloud_bigquery_migration_v2alpha_SqlTranslationError_descriptor;
}

// Wires up reflective field access between this class and its Builder.
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.bigquery.migration.v2alpha.TranslationServiceProto
      .internal_static_google_cloud_bigquery_migration_v2alpha_SqlTranslationError_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.class,
          com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.Builder.class);
}
/**
 *
 *
 * <pre>
 * The error type of the SQL translation job.
 * </pre>
 *
 * Protobuf enum {@code
 * google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType}
 */
public enum SqlTranslationErrorType implements com.google.protobuf.ProtocolMessageEnum {
  /**
   *
   *
   * <pre>
   * SqlTranslationErrorType not specified.
   * </pre>
   *
   * <code>SQL_TRANSLATION_ERROR_TYPE_UNSPECIFIED = 0;</code>
   */
  SQL_TRANSLATION_ERROR_TYPE_UNSPECIFIED(0),
  /**
   *
   *
   * <pre>
   * Failed to parse the input text as a SQL query.
   * </pre>
   *
   * <code>SQL_PARSE_ERROR = 1;</code>
   */
  SQL_PARSE_ERROR(1),
  /**
   *
   *
   * <pre>
   * Found unsupported functions in the input SQL query that are not able to
   * translate.
   * </pre>
   *
   * <code>UNSUPPORTED_SQL_FUNCTION = 2;</code>
   */
  UNSUPPORTED_SQL_FUNCTION(2),
  // Sentinel for wire values this client version does not know about; it has
  // no wire number (getNumber()/getValueDescriptor() throw for it).
  UNRECOGNIZED(-1),
  ;

  /**
   *
   *
   * <pre>
   * SqlTranslationErrorType not specified.
   * </pre>
   *
   * <code>SQL_TRANSLATION_ERROR_TYPE_UNSPECIFIED = 0;</code>
   */
  public static final int SQL_TRANSLATION_ERROR_TYPE_UNSPECIFIED_VALUE = 0;

  /**
   *
   *
   * <pre>
   * Failed to parse the input text as a SQL query.
   * </pre>
   *
   * <code>SQL_PARSE_ERROR = 1;</code>
   */
  public static final int SQL_PARSE_ERROR_VALUE = 1;

  /**
   *
   *
   * <pre>
   * Found unsupported functions in the input SQL query that are not able to
   * translate.
   * </pre>
   *
   * <code>UNSUPPORTED_SQL_FUNCTION = 2;</code>
   */
  public static final int UNSUPPORTED_SQL_FUNCTION_VALUE = 2;

  public final int getNumber() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalArgumentException(
          "Can't get the number of an unknown enum value.");
    }
    return value;
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value.
   * @deprecated Use {@link #forNumber(int)} instead.
   */
  @java.lang.Deprecated
  public static SqlTranslationErrorType valueOf(int value) {
    return forNumber(value);
  }

  /**
   * @param value The numeric wire value of the corresponding enum entry.
   * @return The enum associated with the given numeric wire value, or null
   *     (NOT {@code UNRECOGNIZED}) when the value is unknown.
   */
  public static SqlTranslationErrorType forNumber(int value) {
    switch (value) {
      case 0:
        return SQL_TRANSLATION_ERROR_TYPE_UNSPECIFIED;
      case 1:
        return SQL_PARSE_ERROR;
      case 2:
        return UNSUPPORTED_SQL_FUNCTION;
      default:
        return null;
    }
  }

  public static com.google.protobuf.Internal.EnumLiteMap<SqlTranslationErrorType>
      internalGetValueMap() {
    return internalValueMap;
  }

  private static final com.google.protobuf.Internal.EnumLiteMap<SqlTranslationErrorType>
      internalValueMap =
          new com.google.protobuf.Internal.EnumLiteMap<SqlTranslationErrorType>() {
            public SqlTranslationErrorType findValueByNumber(int number) {
              return SqlTranslationErrorType.forNumber(number);
            }
          };

  public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
    if (this == UNRECOGNIZED) {
      throw new java.lang.IllegalStateException(
          "Can't get the descriptor of an unrecognized enum value.");
    }
    return getDescriptor().getValues().get(ordinal());
  }

  public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
    return getDescriptor();
  }

  public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
    return com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.getDescriptor()
        .getEnumTypes()
        .get(0);
  }

  private static final SqlTranslationErrorType[] VALUES = values();

  public static SqlTranslationErrorType valueOf(
      com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
    if (desc.getType() != getDescriptor()) {
      throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
    }
    if (desc.getIndex() == -1) {
      return UNRECOGNIZED;
    }
    return VALUES[desc.getIndex()];
  }

  // The proto wire number for this constant (distinct from ordinal()).
  private final int value;

  private SqlTranslationErrorType(int value) {
    this.value = value;
  }

  // @@protoc_insertion_point(enum_scope:google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType)
}
// Presence bits for optional message fields; bit 0x1 tracks error_detail.
private int bitField0_;
public static final int ERROR_TYPE_FIELD_NUMBER = 1;
// Stored as the raw wire number so unknown enum values round-trip.
private int errorType_ = 0;

/**
 *
 *
 * <pre>
 * The type of SQL translation error.
 * </pre>
 *
 * <code>
 * .google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType error_type = 1;
 * </code>
 *
 * @return The enum numeric value on the wire for errorType.
 */
@java.lang.Override
public int getErrorTypeValue() {
  return errorType_;
}

/**
 *
 *
 * <pre>
 * The type of SQL translation error.
 * </pre>
 *
 * <code>
 * .google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType error_type = 1;
 * </code>
 *
 * @return The errorType, mapped to {@code UNRECOGNIZED} when the stored wire
 *     value is unknown to this client version.
 */
@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType
    getErrorType() {
  com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType result =
      com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType
          .forNumber(errorType_);
  return result == null
      ? com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType
          .UNRECOGNIZED
      : result;
}

public static final int ERROR_DETAIL_FIELD_NUMBER = 2;
private com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail errorDetail_;

/**
 *
 *
 * <pre>
 * Specifies the details of the error, including the error message and
 * location from the source text.
 * </pre>
 *
 * <code>.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail error_detail = 2;
 * </code>
 *
 * @return Whether the errorDetail field is set.
 */
@java.lang.Override
public boolean hasErrorDetail() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 *
 *
 * <pre>
 * Specifies the details of the error, including the error message and
 * location from the source text.
 * </pre>
 *
 * <code>.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail error_detail = 2;
 * </code>
 *
 * @return The errorDetail (the default instance when unset, never null).
 */
@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail getErrorDetail() {
  return errorDetail_ == null
      ? com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail.getDefaultInstance()
      : errorDetail_;
}

/**
 *
 *
 * <pre>
 * Specifies the details of the error, including the error message and
 * location from the source text.
 * </pre>
 *
 * <code>.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail error_detail = 2;
 * </code>
 */
@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetailOrBuilder
    getErrorDetailOrBuilder() {
  return errorDetail_ == null
      ? com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail.getDefaultInstance()
      : errorDetail_;
}
// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;

  // No required fields in this message, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}

// Serializes set fields; default-valued fields are skipped per proto3 rules
// (the enum is only written when it differs from the 0 value).
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (errorType_
      != com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType
          .SQL_TRANSLATION_ERROR_TYPE_UNSPECIFIED
          .getNumber()) {
    output.writeEnum(1, errorType_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(2, getErrorDetail());
  }
  getUnknownFields().writeTo(output);
}

// Computes (and memoizes) the serialized byte size; mirrors writeTo().
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (errorType_
      != com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType
          .SQL_TRANSLATION_ERROR_TYPE_UNSPECIFIED
          .getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, errorType_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getErrorDetail());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
// Value equality over all fields, including unknown fields from the wire.
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError)) {
    return super.equals(obj);
  }
  com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError other =
      (com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError) obj;

  if (errorType_ != other.errorType_) return false;
  if (hasErrorDetail() != other.hasErrorDetail()) return false;
  if (hasErrorDetail()) {
    if (!getErrorDetail().equals(other.getErrorDetail())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

// Hash over the same fields as equals(); memoized (0 means "not computed",
// which the generator's field-number seeding makes effectively unreachable).
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + ERROR_TYPE_FIELD_NUMBER;
  hash = (53 * hash) + errorType_;
  if (hasErrorDetail()) {
    hash = (37 * hash) + ERROR_DETAIL_FIELD_NUMBER;
    hash = (53 * hash) + getErrorDetail().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// Standard generated parse entry points: one overload per input source
// (ByteBuffer, ByteString, byte[], InputStream, CodedInputStream), each with
// and without an ExtensionRegistryLite.  All delegate to PARSER.
public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError parseFrom(
    byte[] data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

// The *Delimited* variants read a varint length prefix first, so multiple
// messages can be streamed over one InputStream.
public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* The detailed error object if the SQL translation job fails.
* </pre>
*
* Protobuf type {@code google.cloud.bigquery.migration.v2alpha.SqlTranslationError}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.cloud.bigquery.migration.v2alpha.SqlTranslationError)
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorOrBuilder {
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.bigquery.migration.v2alpha.TranslationServiceProto
.internal_static_google_cloud_bigquery_migration_v2alpha_SqlTranslationError_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.bigquery.migration.v2alpha.TranslationServiceProto
.internal_static_google_cloud_bigquery_migration_v2alpha_SqlTranslationError_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.class,
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.Builder.class);
}
// Construct using com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.newBuilder()
private Builder() {
maybeForceBuilderInitialization();
}
private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
super(parent);
maybeForceBuilderInitialization();
}
private void maybeForceBuilderInitialization() {
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
getErrorDetailFieldBuilder();
}
}
@java.lang.Override
public Builder clear() {
super.clear();
bitField0_ = 0;
errorType_ = 0;
errorDetail_ = null;
if (errorDetailBuilder_ != null) {
errorDetailBuilder_.dispose();
errorDetailBuilder_ = null;
}
return this;
}
@java.lang.Override
public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
return com.google.cloud.bigquery.migration.v2alpha.TranslationServiceProto
.internal_static_google_cloud_bigquery_migration_v2alpha_SqlTranslationError_descriptor;
}
@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError
getDefaultInstanceForType() {
return com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.getDefaultInstance();
}
@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError build() {
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError result = buildPartial();
if (!result.isInitialized()) {
throw newUninitializedMessageException(result);
}
return result;
}
@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError buildPartial() {
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError result =
new com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError(this);
if (bitField0_ != 0) {
buildPartial0(result);
}
onBuilt();
return result;
}
private void buildPartial0(
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError result) {
int from_bitField0_ = bitField0_;
if (((from_bitField0_ & 0x00000001) != 0)) {
result.errorType_ = errorType_;
}
int to_bitField0_ = 0;
if (((from_bitField0_ & 0x00000002) != 0)) {
result.errorDetail_ =
errorDetailBuilder_ == null ? errorDetail_ : errorDetailBuilder_.build();
to_bitField0_ |= 0x00000001;
}
result.bitField0_ |= to_bitField0_;
}
@java.lang.Override
public Builder clone() {
return super.clone();
}
@java.lang.Override
public Builder setField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.setField(field, value);
}
@java.lang.Override
public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
return super.clearField(field);
}
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError) {
return mergeFrom((com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError other) {
if (other
== com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.getDefaultInstance())
return this;
if (other.errorType_ != 0) {
setErrorTypeValue(other.getErrorTypeValue());
}
if (other.hasErrorDetail()) {
mergeErrorDetail(other.getErrorDetail());
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 8:
{
errorType_ = input.readEnum();
bitField0_ |= 0x00000001;
break;
} // case 8
case 18:
{
input.readMessage(getErrorDetailFieldBuilder().getBuilder(), extensionRegistry);
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private int errorType_ = 0;
/**
*
*
* <pre>
* The type of SQL translation error.
* </pre>
*
* <code>
* .google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType error_type = 1;
* </code>
*
* @return The enum numeric value on the wire for errorType.
*/
@java.lang.Override
public int getErrorTypeValue() {
return errorType_;
}
/**
*
*
* <pre>
* The type of SQL translation error.
* </pre>
*
* <code>
* .google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType error_type = 1;
* </code>
*
* @param value The enum numeric value on the wire for errorType to set.
* @return This builder for chaining.
*/
public Builder setErrorTypeValue(int value) {
errorType_ = value;
bitField0_ |= 0x00000001;
onChanged();
return this;
}
/**
*
*
* <pre>
* The type of SQL translation error.
* </pre>
*
* <code>
* .google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType error_type = 1;
* </code>
*
* @return The errorType.
*/
@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType
getErrorType() {
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType
result =
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError
.SqlTranslationErrorType.forNumber(errorType_);
return result == null
? com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType
.UNRECOGNIZED
: result;
}
/**
*
*
* <pre>
* The type of SQL translation error.
* </pre>
*
* <code>
* .google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType error_type = 1;
* </code>
*
* @param value The errorType to set.
* @return This builder for chaining.
*/
public Builder setErrorType(
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType
value) {
if (value == null) {
throw new NullPointerException();
}
bitField0_ |= 0x00000001;
errorType_ = value.getNumber();
onChanged();
return this;
}
/**
*
*
* <pre>
* The type of SQL translation error.
* </pre>
*
* <code>
* .google.cloud.bigquery.migration.v2alpha.SqlTranslationError.SqlTranslationErrorType error_type = 1;
* </code>
*
* @return This builder for chaining.
*/
public Builder clearErrorType() {
bitField0_ = (bitField0_ & ~0x00000001);
errorType_ = 0;
onChanged();
return this;
}
private com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail errorDetail_;
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail,
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail.Builder,
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetailOrBuilder>
errorDetailBuilder_;
/**
*
*
* <pre>
* Specifies the details of the error, including the error message and
* location from the source text.
* </pre>
*
* <code>.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail error_detail = 2;
* </code>
*
* @return Whether the errorDetail field is set.
*/
public boolean hasErrorDetail() {
return ((bitField0_ & 0x00000002) != 0);
}
/**
*
*
* <pre>
* Specifies the details of the error, including the error message and
* location from the source text.
* </pre>
*
* <code>.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail error_detail = 2;
* </code>
*
* @return The errorDetail.
*/
public com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail getErrorDetail() {
if (errorDetailBuilder_ == null) {
return errorDetail_ == null
? com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail
.getDefaultInstance()
: errorDetail_;
} else {
return errorDetailBuilder_.getMessage();
}
}
/**
*
*
* <pre>
* Specifies the details of the error, including the error message and
* location from the source text.
* </pre>
*
* <code>.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail error_detail = 2;
* </code>
*/
public Builder setErrorDetail(
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail value) {
if (errorDetailBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
errorDetail_ = value;
} else {
errorDetailBuilder_.setMessage(value);
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Specifies the details of the error, including the error message and
* location from the source text.
* </pre>
*
* <code>.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail error_detail = 2;
* </code>
*/
public Builder setErrorDetail(
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail.Builder
builderForValue) {
if (errorDetailBuilder_ == null) {
errorDetail_ = builderForValue.build();
} else {
errorDetailBuilder_.setMessage(builderForValue.build());
}
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* Specifies the details of the error, including the error message and
* location from the source text.
* </pre>
*
* <code>.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail error_detail = 2;
* </code>
*/
public Builder mergeErrorDetail(
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail value) {
if (errorDetailBuilder_ == null) {
if (((bitField0_ & 0x00000002) != 0)
&& errorDetail_ != null
&& errorDetail_
!= com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail
.getDefaultInstance()) {
getErrorDetailBuilder().mergeFrom(value);
} else {
errorDetail_ = value;
}
} else {
errorDetailBuilder_.mergeFrom(value);
}
if (errorDetail_ != null) {
bitField0_ |= 0x00000002;
onChanged();
}
return this;
}
/**
*
*
* <pre>
* Specifies the details of the error, including the error message and
* location from the source text.
* </pre>
*
* <code>.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail error_detail = 2;
* </code>
*/
public Builder clearErrorDetail() {
bitField0_ = (bitField0_ & ~0x00000002);
errorDetail_ = null;
if (errorDetailBuilder_ != null) {
errorDetailBuilder_.dispose();
errorDetailBuilder_ = null;
}
onChanged();
return this;
}
/**
*
*
* <pre>
* Specifies the details of the error, including the error message and
* location from the source text.
* </pre>
*
* <code>.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail error_detail = 2;
* </code>
*/
public com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail.Builder
getErrorDetailBuilder() {
bitField0_ |= 0x00000002;
onChanged();
return getErrorDetailFieldBuilder().getBuilder();
}
/**
*
*
* <pre>
* Specifies the details of the error, including the error message and
* location from the source text.
* </pre>
*
* <code>.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail error_detail = 2;
* </code>
*/
public com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetailOrBuilder
getErrorDetailOrBuilder() {
if (errorDetailBuilder_ != null) {
return errorDetailBuilder_.getMessageOrBuilder();
} else {
return errorDetail_ == null
? com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail
.getDefaultInstance()
: errorDetail_;
}
}
/**
*
*
* <pre>
* Specifies the details of the error, including the error message and
* location from the source text.
* </pre>
*
* <code>.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail error_detail = 2;
* </code>
*/
private com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail,
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail.Builder,
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetailOrBuilder>
getErrorDetailFieldBuilder() {
if (errorDetailBuilder_ == null) {
errorDetailBuilder_ =
new com.google.protobuf.SingleFieldBuilderV3<
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail,
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetail.Builder,
com.google.cloud.bigquery.migration.v2alpha.SqlTranslationErrorDetailOrBuilder>(
getErrorDetail(), getParentForChildren(), isClean());
errorDetail_ = null;
}
return errorDetailBuilder_;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.cloud.bigquery.migration.v2alpha.SqlTranslationError)
}
// @@protoc_insertion_point(class_scope:google.cloud.bigquery.migration.v2alpha.SqlTranslationError)
private static final com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError
DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError();
}
public static com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError
getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<SqlTranslationError> PARSER =
new com.google.protobuf.AbstractParser<SqlTranslationError>() {
@java.lang.Override
public SqlTranslationError parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<SqlTranslationError> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<SqlTranslationError> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.cloud.bigquery.migration.v2alpha.SqlTranslationError
getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 37,181 | java-kms/proto-google-cloud-kms-v1/src/main/java/com/google/cloud/kms/v1/GenerateRandomBytesResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/kms/v1/service.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.kms.v1;
/**
*
*
* <pre>
* Response message for
* [KeyManagementService.GenerateRandomBytes][google.cloud.kms.v1.KeyManagementService.GenerateRandomBytes].
* </pre>
*
* Protobuf type {@code google.cloud.kms.v1.GenerateRandomBytesResponse}
*/
public final class GenerateRandomBytesResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.kms.v1.GenerateRandomBytesResponse)
GenerateRandomBytesResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use GenerateRandomBytesResponse.newBuilder() to construct.
private GenerateRandomBytesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
private GenerateRandomBytesResponse() {
data_ = com.google.protobuf.ByteString.EMPTY;
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new GenerateRandomBytesResponse();
}
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.kms.v1.KmsProto
.internal_static_google_cloud_kms_v1_GenerateRandomBytesResponse_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.kms.v1.KmsProto
.internal_static_google_cloud_kms_v1_GenerateRandomBytesResponse_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.kms.v1.GenerateRandomBytesResponse.class,
com.google.cloud.kms.v1.GenerateRandomBytesResponse.Builder.class);
}
private int bitField0_;
public static final int DATA_FIELD_NUMBER = 1;
private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
/**
*
*
* <pre>
* The generated data.
* </pre>
*
* <code>bytes data = 1;</code>
*
* @return The data.
*/
@java.lang.Override
public com.google.protobuf.ByteString getData() {
return data_;
}
public static final int DATA_CRC32C_FIELD_NUMBER = 3;
private com.google.protobuf.Int64Value dataCrc32C_;
/**
*
*
* <pre>
* Integrity verification field. A CRC32C checksum of the returned
* [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data].
* An integrity check of
* [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data]
* can be performed by computing the CRC32C checksum of
* [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data]
* and comparing your results to this field. Discard the response in case of
* non-matching checksum values, and perform a limited number of retries. A
* persistent mismatch may indicate an issue in your computation of the CRC32C
* checksum. Note: This field is defined as int64 for reasons of compatibility
* across different languages. However, it is a non-negative integer, which
* will never exceed 2^32-1, and can be safely downconverted to uint32 in
* languages that support this type.
* </pre>
*
* <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
*
* @return Whether the dataCrc32c field is set.
*/
@java.lang.Override
public boolean hasDataCrc32C() {
return ((bitField0_ & 0x00000001) != 0);
}
/**
*
*
* <pre>
* Integrity verification field. A CRC32C checksum of the returned
* [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data].
* An integrity check of
* [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data]
* can be performed by computing the CRC32C checksum of
* [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data]
* and comparing your results to this field. Discard the response in case of
* non-matching checksum values, and perform a limited number of retries. A
* persistent mismatch may indicate an issue in your computation of the CRC32C
* checksum. Note: This field is defined as int64 for reasons of compatibility
* across different languages. However, it is a non-negative integer, which
* will never exceed 2^32-1, and can be safely downconverted to uint32 in
* languages that support this type.
* </pre>
*
* <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
*
* @return The dataCrc32c.
*/
@java.lang.Override
public com.google.protobuf.Int64Value getDataCrc32C() {
return dataCrc32C_ == null ? com.google.protobuf.Int64Value.getDefaultInstance() : dataCrc32C_;
}
/**
*
*
* <pre>
* Integrity verification field. A CRC32C checksum of the returned
* [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data].
* An integrity check of
* [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data]
* can be performed by computing the CRC32C checksum of
* [GenerateRandomBytesResponse.data][google.cloud.kms.v1.GenerateRandomBytesResponse.data]
* and comparing your results to this field. Discard the response in case of
* non-matching checksum values, and perform a limited number of retries. A
* persistent mismatch may indicate an issue in your computation of the CRC32C
* checksum. Note: This field is defined as int64 for reasons of compatibility
* across different languages. However, it is a non-negative integer, which
* will never exceed 2^32-1, and can be safely downconverted to uint32 in
* languages that support this type.
* </pre>
*
* <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
*/
@java.lang.Override
public com.google.protobuf.Int64ValueOrBuilder getDataCrc32COrBuilder() {
return dataCrc32C_ == null ? com.google.protobuf.Int64Value.getDefaultInstance() : dataCrc32C_;
}
private byte memoizedIsInitialized = -1;
@java.lang.Override
public final boolean isInitialized() {
byte isInitialized = memoizedIsInitialized;
if (isInitialized == 1) return true;
if (isInitialized == 0) return false;
memoizedIsInitialized = 1;
return true;
}
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
if (!data_.isEmpty()) {
output.writeBytes(1, data_);
}
if (((bitField0_ & 0x00000001) != 0)) {
output.writeMessage(3, getDataCrc32C());
}
getUnknownFields().writeTo(output);
}
@java.lang.Override
public int getSerializedSize() {
int size = memoizedSize;
if (size != -1) return size;
size = 0;
if (!data_.isEmpty()) {
size += com.google.protobuf.CodedOutputStream.computeBytesSize(1, data_);
}
if (((bitField0_ & 0x00000001) != 0)) {
size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getDataCrc32C());
}
size += getUnknownFields().getSerializedSize();
memoizedSize = size;
return size;
}
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
if (obj == this) {
return true;
}
if (!(obj instanceof com.google.cloud.kms.v1.GenerateRandomBytesResponse)) {
return super.equals(obj);
}
com.google.cloud.kms.v1.GenerateRandomBytesResponse other =
(com.google.cloud.kms.v1.GenerateRandomBytesResponse) obj;
if (!getData().equals(other.getData())) return false;
if (hasDataCrc32C() != other.hasDataCrc32C()) return false;
if (hasDataCrc32C()) {
if (!getDataCrc32C().equals(other.getDataCrc32C())) return false;
}
if (!getUnknownFields().equals(other.getUnknownFields())) return false;
return true;
}
@java.lang.Override
public int hashCode() {
if (memoizedHashCode != 0) {
return memoizedHashCode;
}
int hash = 41;
hash = (19 * hash) + getDescriptor().hashCode();
hash = (37 * hash) + DATA_FIELD_NUMBER;
hash = (53 * hash) + getData().hashCode();
if (hasDataCrc32C()) {
hash = (37 * hash) + DATA_CRC32C_FIELD_NUMBER;
hash = (53 * hash) + getDataCrc32C().hashCode();
}
hash = (29 * hash) + getUnknownFields().hashCode();
memoizedHashCode = hash;
return hash;
}
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse parseFrom(
java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse parseFrom(
java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse parseFrom(
com.google.protobuf.ByteString data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse parseFrom(
com.google.protobuf.ByteString data,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse parseFrom(byte[] data)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data);
}
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse parseFrom(
byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
return PARSER.parseFrom(data, extensionRegistry);
}
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse parseFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse parseFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse parseDelimitedFrom(
java.io.InputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse parseDelimitedFrom(
java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
PARSER, input, extensionRegistry);
}
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse parseFrom(
com.google.protobuf.CodedInputStream input) throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse parseFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
PARSER, input, extensionRegistry);
}
@java.lang.Override
public Builder newBuilderForType() {
return newBuilder();
}
public static Builder newBuilder() {
return DEFAULT_INSTANCE.toBuilder();
}
public static Builder newBuilder(com.google.cloud.kms.v1.GenerateRandomBytesResponse prototype) {
return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}
@java.lang.Override
public Builder toBuilder() {
return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}
@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
Builder builder = new Builder(parent);
return builder;
}
/**
*
*
* <pre>
* Response message for
* [KeyManagementService.GenerateRandomBytes][google.cloud.kms.v1.KeyManagementService.GenerateRandomBytes].
* </pre>
*
* Protobuf type {@code google.cloud.kms.v1.GenerateRandomBytesResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.kms.v1.GenerateRandomBytesResponse)
    com.google.cloud.kms.v1.GenerateRandomBytesResponseOrBuilder {
  // NOTE(review): this class is generated by the protocol buffer compiler from kms.proto.
  // Do not hand-edit; change the .proto definition and regenerate instead.

  /** Returns the protobuf descriptor for the {@code GenerateRandomBytesResponse} message type. */
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.kms.v1.KmsProto
        .internal_static_google_cloud_kms_v1_GenerateRandomBytesResponse_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.kms.v1.KmsProto
        .internal_static_google_cloud_kms_v1_GenerateRandomBytesResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.kms.v1.GenerateRandomBytesResponse.class,
            com.google.cloud.kms.v1.GenerateRandomBytesResponse.Builder.class);
  }

  // Construct using com.google.cloud.kms.v1.GenerateRandomBytesResponse.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  // Eagerly creates the nested-message field builder when the protobuf runtime is
  // configured to always use field builders (memoization mode).
  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getDataCrc32CFieldBuilder();
    }
  }

  /** Resets every field to its default value and clears all presence bits. */
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    data_ = com.google.protobuf.ByteString.EMPTY;
    dataCrc32C_ = null;
    if (dataCrc32CBuilder_ != null) {
      dataCrc32CBuilder_.dispose();
      dataCrc32CBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.kms.v1.KmsProto
        .internal_static_google_cloud_kms_v1_GenerateRandomBytesResponse_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.kms.v1.GenerateRandomBytesResponse getDefaultInstanceForType() {
    return com.google.cloud.kms.v1.GenerateRandomBytesResponse.getDefaultInstance();
  }

  /** Builds the message; throws if it is uninitialized (isInitialized() is always true here). */
  @java.lang.Override
  public com.google.cloud.kms.v1.GenerateRandomBytesResponse build() {
    com.google.cloud.kms.v1.GenerateRandomBytesResponse result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.kms.v1.GenerateRandomBytesResponse buildPartial() {
    com.google.cloud.kms.v1.GenerateRandomBytesResponse result =
        new com.google.cloud.kms.v1.GenerateRandomBytesResponse(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies into `result` only the fields whose presence bits are set in this builder.
  // Builder bit 0x2 (data_crc32c) maps to message bit 0x1.
  private void buildPartial0(com.google.cloud.kms.v1.GenerateRandomBytesResponse result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.data_ = data_;
    }
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.dataCrc32C_ = dataCrc32CBuilder_ == null ? dataCrc32C_ : dataCrc32CBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  // Dynamic-dispatch merge: delegates to the typed overload when possible.
  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.kms.v1.GenerateRandomBytesResponse) {
      return mergeFrom((com.google.cloud.kms.v1.GenerateRandomBytesResponse) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  /** Merges the set fields of {@code other} into this builder; default-valued fields are skipped. */
  public Builder mergeFrom(com.google.cloud.kms.v1.GenerateRandomBytesResponse other) {
    if (other == com.google.cloud.kms.v1.GenerateRandomBytesResponse.getDefaultInstance())
      return this;
    if (other.getData() != com.google.protobuf.ByteString.EMPTY) {
      setData(other.getData());
    }
    if (other.hasDataCrc32C()) {
      mergeDataCrc32C(other.getDataCrc32C());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  // Proto3 message with no required fields: always initialized.
  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  /** Parses wire-format bytes into this builder, preserving unrecognized fields. */
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 10: // field 1 (data), wire type 2: length-delimited bytes
            {
              data_ = input.readBytes();
              bitField0_ |= 0x00000001;
              break;
            } // case 10
          case 26: // field 3 (data_crc32c), wire type 2: embedded Int64Value
            {
              input.readMessage(getDataCrc32CFieldBuilder().getBuilder(), extensionRegistry);
              bitField0_ |= 0x00000002;
              break;
            } // case 26
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Presence bits: 0x00000001 = data, 0x00000002 = data_crc32c.
  private int bitField0_;

  private com.google.protobuf.ByteString data_ = com.google.protobuf.ByteString.EMPTY;
  /**
   * The generated data.
   *
   * <code>bytes data = 1;</code>
   *
   * @return The data.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getData() {
    return data_;
  }
  /**
   * The generated data.
   *
   * <code>bytes data = 1;</code>
   *
   * @param value The data to set; must not be null.
   * @return This builder for chaining.
   */
  public Builder setData(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    data_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }
  /**
   * Clears the generated data back to the empty byte string.
   *
   * <code>bytes data = 1;</code>
   *
   * @return This builder for chaining.
   */
  public Builder clearData() {
    bitField0_ = (bitField0_ & ~0x00000001);
    data_ = getDefaultInstance().getData();
    onChanged();
    return this;
  }

  // Exactly one of dataCrc32C_ / dataCrc32CBuilder_ owns the value at any time; once the
  // field builder is created (getDataCrc32CFieldBuilder) the plain field is nulled out.
  private com.google.protobuf.Int64Value dataCrc32C_;
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.Int64Value,
          com.google.protobuf.Int64Value.Builder,
          com.google.protobuf.Int64ValueOrBuilder>
      dataCrc32CBuilder_;
  /**
   * Integrity-verification field: a CRC32C checksum of {@code data}. Declared as
   * {@code Int64Value} for cross-language compatibility; the value never exceeds 2^32-1
   * and may be safely downconverted to uint32.
   *
   * <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
   *
   * @return Whether the dataCrc32c field is set.
   */
  public boolean hasDataCrc32C() {
    return ((bitField0_ & 0x00000002) != 0);
  }
  /**
   * Integrity-verification field: a CRC32C checksum of {@code data} (see {@link #hasDataCrc32C}).
   *
   * <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
   *
   * @return The dataCrc32c, or the default instance if unset.
   */
  public com.google.protobuf.Int64Value getDataCrc32C() {
    if (dataCrc32CBuilder_ == null) {
      return dataCrc32C_ == null
          ? com.google.protobuf.Int64Value.getDefaultInstance()
          : dataCrc32C_;
    } else {
      return dataCrc32CBuilder_.getMessage();
    }
  }
  /**
   * Sets {@code data_crc32c} to the given value.
   *
   * <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
   */
  public Builder setDataCrc32C(com.google.protobuf.Int64Value value) {
    if (dataCrc32CBuilder_ == null) {
      if (value == null) {
        throw new NullPointerException();
      }
      dataCrc32C_ = value;
    } else {
      dataCrc32CBuilder_.setMessage(value);
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   * Sets {@code data_crc32c} from a sub-builder.
   *
   * <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
   */
  public Builder setDataCrc32C(com.google.protobuf.Int64Value.Builder builderForValue) {
    if (dataCrc32CBuilder_ == null) {
      dataCrc32C_ = builderForValue.build();
    } else {
      dataCrc32CBuilder_.setMessage(builderForValue.build());
    }
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }
  /**
   * Merges {@code value} into {@code data_crc32c}; if the field was unset or held the
   * default instance, the new value replaces it outright.
   *
   * <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
   */
  public Builder mergeDataCrc32C(com.google.protobuf.Int64Value value) {
    if (dataCrc32CBuilder_ == null) {
      if (((bitField0_ & 0x00000002) != 0)
          && dataCrc32C_ != null
          && dataCrc32C_ != com.google.protobuf.Int64Value.getDefaultInstance()) {
        getDataCrc32CBuilder().mergeFrom(value);
      } else {
        dataCrc32C_ = value;
      }
    } else {
      dataCrc32CBuilder_.mergeFrom(value);
    }
    if (dataCrc32C_ != null) {
      bitField0_ |= 0x00000002;
      onChanged();
    }
    return this;
  }
  /**
   * Clears {@code data_crc32c} and disposes any live sub-builder.
   *
   * <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
   */
  public Builder clearDataCrc32C() {
    bitField0_ = (bitField0_ & ~0x00000002);
    dataCrc32C_ = null;
    if (dataCrc32CBuilder_ != null) {
      dataCrc32CBuilder_.dispose();
      dataCrc32CBuilder_ = null;
    }
    onChanged();
    return this;
  }
  /**
   * Returns a mutable sub-builder for {@code data_crc32c}, marking the field as set.
   *
   * <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
   */
  public com.google.protobuf.Int64Value.Builder getDataCrc32CBuilder() {
    bitField0_ |= 0x00000002;
    onChanged();
    return getDataCrc32CFieldBuilder().getBuilder();
  }
  /**
   * Read-only view of {@code data_crc32c} that does not force sub-builder creation.
   *
   * <code>.google.protobuf.Int64Value data_crc32c = 3;</code>
   */
  public com.google.protobuf.Int64ValueOrBuilder getDataCrc32COrBuilder() {
    if (dataCrc32CBuilder_ != null) {
      return dataCrc32CBuilder_.getMessageOrBuilder();
    } else {
      return dataCrc32C_ == null
          ? com.google.protobuf.Int64Value.getDefaultInstance()
          : dataCrc32C_;
    }
  }
  // Lazily creates the single-field builder; ownership of the current value transfers
  // to the builder and dataCrc32C_ is nulled so only one holder exists.
  private com.google.protobuf.SingleFieldBuilderV3<
          com.google.protobuf.Int64Value,
          com.google.protobuf.Int64Value.Builder,
          com.google.protobuf.Int64ValueOrBuilder>
      getDataCrc32CFieldBuilder() {
    if (dataCrc32CBuilder_ == null) {
      dataCrc32CBuilder_ =
          new com.google.protobuf.SingleFieldBuilderV3<
              com.google.protobuf.Int64Value,
              com.google.protobuf.Int64Value.Builder,
              com.google.protobuf.Int64ValueOrBuilder>(
              getDataCrc32C(), getParentForChildren(), isClean());
      dataCrc32C_ = null;
    }
    return dataCrc32CBuilder_;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.kms.v1.GenerateRandomBytesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.kms.v1.GenerateRandomBytesResponse)
// Shared immutable default instance; getDefaultInstance()/getDefaultInstanceForType()
// always return this same object. (Generated code — do not hand-edit.)
private static final com.google.cloud.kms.v1.GenerateRandomBytesResponse DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.kms.v1.GenerateRandomBytesResponse();
}

/** Returns the singleton default (all-fields-default) instance of this message. */
public static com.google.cloud.kms.v1.GenerateRandomBytesResponse getDefaultInstance() {
  return DEFAULT_INSTANCE;
}
// Singleton wire-format parser. parsePartialFrom attaches the partially-built message to
// any InvalidProtocolBufferException so callers can inspect what was parsed before failure.
private static final com.google.protobuf.Parser<GenerateRandomBytesResponse> PARSER =
    new com.google.protobuf.AbstractParser<GenerateRandomBytesResponse>() {
      @java.lang.Override
      public GenerateRandomBytesResponse parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          // Wrap plain I/O failures so the parser contract (IPBE only) holds.
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };
/** Static accessor for the shared parser instance. */
public static com.google.protobuf.Parser<GenerateRandomBytesResponse> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<GenerateRandomBytesResponse> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.kms.v1.GenerateRandomBytesResponse getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
/*
* Copyright 2018 Google Inc.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package jsenum;
import static com.google.j2cl.integration.testing.Asserts.assertEquals;
import static com.google.j2cl.integration.testing.Asserts.assertFalse;
import static com.google.j2cl.integration.testing.Asserts.assertThrows;
import static com.google.j2cl.integration.testing.Asserts.assertThrowsClassCastException;
import static com.google.j2cl.integration.testing.Asserts.assertThrowsNullPointerException;
import static com.google.j2cl.integration.testing.Asserts.assertTrue;
import static com.google.j2cl.integration.testing.Asserts.assertUnderlyingTypeEquals;
import static com.google.j2cl.integration.testing.Asserts.fail;
import static jsenum.NativeEnums.nativeClinitCalled;
import com.google.j2cl.integration.testing.TestUtils;
import java.io.Serializable;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.SortedSet;
import java.util.TreeSet;
import java.util.function.Function;
import java.util.function.Supplier;
import javaemul.internal.annotations.DoNotAutobox;
import javaemul.internal.annotations.UncheckedCast;
import javaemul.internal.annotations.Wasm;
import jsenum.NativeEnums.NativeEnum;
import jsenum.NativeEnums.NativeEnumWitMissingValues;
import jsenum.NativeEnums.NativeEnumWithClinit;
import jsenum.NativeEnums.NumberNativeEnum;
import jsenum.NativeEnums.StringNativeEnum;
import jsinterop.annotations.JsEnum;
import jsinterop.annotations.JsMethod;
import jsinterop.annotations.JsProperty;
import jsinterop.annotations.JsType;
public class Main {
// Expected underlying JS values, deliberately typed as Object so that comparisons against
// JsEnum instances exercise runtime (un)boxing rather than compile-time identities.
private static final Object OK_STRING = "Ok";
private static final Object HELLO_STRING = "Hello";
private static final Object ONE_DOUBLE = 1.0d;
private static final Object FALSE_BOOLEAN = false;
/**
 * Entry point: runs every jsenum integration scenario in sequence.
 *
 * <p>NOTE(review): the class-initialization tests observe whether clinits have already
 * run, so they likely depend on execution order — confirm before reordering calls.
 */
public static void main(String... args) {
  testNativeJsEnum();
  testNativeJsEnumWithMissingValues();
  testStringNativeJsEnum();
  testCastOnNative();
  testComparableJsEnum();
  testComparableJsEnumAsSeenFromJs();
  testComparableJsEnumIntersectionCasts();
  testJsEnumVariableInitialization();
  testStringJsEnum();
  testStringJsEnumAsSeenFromJs();
  testJsEnumClassInitialization();
  testNativeEnumClassInitialization();
  testDoNotAutoboxJsEnum();
  testUnckeckedCastJsEnum();
  testReturnsAndParameters();
  testAutoBoxing_relationalOperations();
  testAutoBoxing_typeInference();
  testAutoBoxing_specialMethods();
  testAutoBoxing_parameterizedLambda();
  testAutoBoxing_intersectionCasts();
  testSpecializedSuperType();
  testSpecializedSuperTypeUnderlyingType();
  testBoxingPartialInlining();
  testNonNativeJsEnumArrays();
  testNonNativeStringJsEnumArrays();
  testNonNativeJsEnumArrayBoxing();
  testNativeJsEnumArray();
  testJsEnumVarargs();
}
/**
 * Verifies that a native JsEnum value is represented directly by its underlying JS value:
 * it compares identical ({@code ==}) to the raw string and to other native enums sharing
 * the value, Object methods dispatch to the underlying value, and {@code instanceof}/casts
 * follow the underlying type rather than the declared enum type.
 */
@Wasm("nop") // TODO(b/288145698): Support native JsEnum.
private static void testNativeJsEnum() {
  NativeEnum v = NativeEnum.ACCEPT;
  // Switching on a native JsEnum must dispatch on the underlying value.
  switch (v) {
    case ACCEPT:
      break;
    case CANCEL:
      fail();
      break;
    default:
      fail();
      break;
  }
  // Switching on a null JsEnum reference must throw.
  assertThrows(
      NullPointerException.class,
      () -> {
        NativeEnum nullJsEnum = null;
        switch (nullJsEnum) {
          default:
        }
      });
  assertTrue(v == NativeEnum.ACCEPT);
  assertTrue(v != NativeEnum.CANCEL);
  // Native JsEnums are not boxed.
  assertTrue(v == OK_STRING);
  assertTrue(v == (Object) StringNativeEnum.OK);
  // No boxing
  Object o = NativeEnum.ACCEPT;
  assertTrue(o == NativeEnum.ACCEPT);
  // Object methods calls on a variable of JsEnum type.
  assertTrue(v.hashCode() == NativeEnum.ACCEPT.hashCode());
  assertTrue(v.hashCode() != NativeEnum.CANCEL.hashCode());
  assertTrue(v.hashCode() == StringNativeEnum.OK.hashCode());
  assertTrue(v.toString().equals(OK_STRING));
  assertTrue(v.equals(NativeEnum.ACCEPT));
  assertTrue(v.equals(OK_STRING));
  assertTrue(v.equals(StringNativeEnum.OK));
  // Object methods calls on a variable of Object type.
  assertTrue(o.hashCode() == NativeEnum.ACCEPT.hashCode());
  assertTrue(o.hashCode() != NativeEnum.CANCEL.hashCode());
  assertTrue(o.hashCode() == StringNativeEnum.OK.hashCode());
  assertTrue(o.toString().equals(OK_STRING));
  assertTrue(o.equals(NativeEnum.ACCEPT));
  assertTrue(o.equals(OK_STRING));
  assertTrue(o.equals(StringNativeEnum.OK));
  // instanceof reflects the underlying String, not Enum.
  assertFalse(v instanceof Enum);
  assertTrue((Object) v instanceof String);
  assertTrue(v instanceof Comparable);
  assertTrue(v instanceof Serializable);
  assertFalse((Object) v instanceof PlainJsEnum);
  // Casts to types the underlying value satisfies must succeed...
  NativeEnum ne = (NativeEnum) o;
  StringNativeEnum sne = (StringNativeEnum) o;
  Comparable ce = (Comparable) o;
  ce = (NativeEnum & Comparable<NativeEnum>) o;
  Serializable s = (Serializable) o;
  // ...and casts to unrelated types must throw CCE.
  assertThrowsClassCastException(
      () -> {
        Object unused = (Enum) o;
      },
      Enum.class);
  assertThrowsClassCastException(
      () -> {
        Object unused = (Boolean) o;
      },
      Boolean.class);
  // The value crosses the JS boundary unchanged (no boxing on the way out).
  assertTrue(asSeenFromJs(NativeEnum.ACCEPT) == OK_STRING);
}
/**
 * Verifies that switching (via a switch expression) on a native JsEnum value that has no
 * corresponding constant in the declared enum falls through to the {@code default} arm
 * instead of failing.
 */
@Wasm("nop") // TODO(b/288145698): Support native JsEnum.
private static void testNativeJsEnumWithMissingValues() {
  // CANCEL exists in NativeEnum but NativeEnumWitMissingValues only declares OK.
  NativeEnumWitMissingValues e = (NativeEnumWitMissingValues) (Object) NativeEnum.CANCEL;
  int i =
      switch (e) {
        case OK -> 1;
        default -> -1;
      };
  assertEquals(-1, i);
}
// JS passthrough used to observe the runtime representation of a native JsEnum.
// Presumably implemented as an identity function named `passThrough` in the companion
// native JS — the assertions rely on it returning its argument unchanged (TODO confirm).
@JsMethod(name = "passThrough")
@Wasm("nop") // TODO(b/288145698): Support native JsEnum.
private static native Object asSeenFromJs(NativeEnum s);
/**
 * Same checks as {@code testNativeJsEnum} but for a native JsEnum declared with an
 * explicit custom value accessor ({@code getValue()}): identity with the underlying
 * string, Object-method dispatch, instanceof/cast behavior, and JS round-tripping.
 */
@Wasm("nop") // TODO(b/288145698): Support native JsEnum.
private static void testStringNativeJsEnum() {
  StringNativeEnum v = StringNativeEnum.OK;
  switch (v) {
    case OK:
      break;
    case CANCEL:
      fail();
      break;
    default:
      fail();
      break;
  }
  // Switching on a null JsEnum reference must throw.
  assertThrows(
      NullPointerException.class,
      () -> {
        StringNativeEnum nullJsEnum = null;
        switch (nullJsEnum) {
          default:
        }
      });
  assertTrue(v == StringNativeEnum.OK);
  assertTrue(v != StringNativeEnum.CANCEL);
  // Unboxed: identical to the raw string and to NativeEnum.ACCEPT (same underlying value).
  assertTrue((Object) v == OK_STRING);
  assertTrue(v == (Object) NativeEnum.ACCEPT);
  Object o = StringNativeEnum.OK;
  assertTrue(o == StringNativeEnum.OK);
  // Object methods calls on a variable of JsEnum type.
  assertTrue(v.hashCode() == StringNativeEnum.OK.hashCode());
  assertTrue(v.hashCode() != StringNativeEnum.CANCEL.hashCode());
  assertTrue(v.toString().equals(OK_STRING));
  assertTrue(v.equals(StringNativeEnum.OK));
  assertTrue(v.equals(NativeEnum.ACCEPT));
  assertTrue(v.equals(OK_STRING));
  // Object methods calls on a variable of Object type.
  assertTrue(o.hashCode() == StringNativeEnum.OK.hashCode());
  assertTrue(o.hashCode() != StringNativeEnum.CANCEL.hashCode());
  assertTrue(o.toString().equals(OK_STRING));
  assertTrue(o.equals(StringNativeEnum.OK));
  assertTrue(o.equals(NativeEnum.ACCEPT));
  assertTrue(o.equals(OK_STRING));
  // The custom value accessor returns the underlying string itself.
  assertTrue(v.getValue().equals(v.toString()));
  assertTrue(v.getValue().equals(OK_STRING));
  assertFalse(v instanceof Enum);
  assertTrue((Object) v instanceof String);
  assertTrue(v instanceof Comparable);
  assertTrue(v instanceof Serializable);
  assertFalse((Object) v instanceof PlainJsEnum);
  // Casts compatible with the underlying string succeed; others throw CCE.
  Serializable se = (Serializable) o;
  StringNativeEnum sne = (StringNativeEnum) o;
  NativeEnum ne = (NativeEnum) o;
  Comparable ce = (Comparable) o;
  Comparable seAndC = (StringNativeEnum & Comparable<StringNativeEnum>) o;
  assertUnderlyingTypeEquals(String.class, seAndC);
  assertThrowsClassCastException(
      () -> {
        Object unused = (Enum) o;
      },
      Enum.class);
  assertThrowsClassCastException(
      () -> {
        Object unused = (Boolean) o;
      },
      Boolean.class);
  assertTrue(asSeenFromJs(StringNativeEnum.OK) == OK_STRING);
}
/**
 * Verifies runtime cast behavior for native JsEnums: casting to a native JsEnum type with
 * no declared underlying type is unchecked (accepts anything), while casting to a native
 * JsEnum with a string/number underlying type is checked against that underlying type.
 */
@Wasm("nop") // TODO(b/288145698): Support native JsEnum.
public static void testCastOnNative() {
  // NativeEnum: cast is unchecked — every value below passes.
  castToNativeEnum(NativeEnum.ACCEPT);
  castToNativeEnum(StringNativeEnum.OK);
  castToNativeEnum(NumberNativeEnum.ONE);
  castToNativeEnum(PlainJsEnum.ONE);
  castToNativeEnum(OK_STRING);
  castToNativeEnum((Double) 2.0);
  castToNativeEnum((Integer) 1);
  // StringNativeEnum: only string-backed values pass the checked cast.
  castToStringNativeEnum(StringNativeEnum.OK);
  castToStringNativeEnum(NativeEnum.ACCEPT);
  castToStringNativeEnum(OK_STRING);
  assertThrowsClassCastException(() -> castToStringNativeEnum(NumberNativeEnum.ONE));
  assertThrowsClassCastException(() -> castToStringNativeEnum(PlainJsEnum.ONE));
  assertThrowsClassCastException(() -> castToStringNativeEnum((Integer) 1));
  assertThrowsClassCastException(() -> castToStringNativeEnum((Double) 2.0));
  // NumberNativeEnum: only double-backed values pass the checked cast.
  castToNumberNativeEnum(NumberNativeEnum.ONE);
  castToNumberNativeEnum((Double) 2.0);
  assertThrowsClassCastException(() -> castToNumberNativeEnum(NativeEnum.ACCEPT));
  assertThrowsClassCastException(() -> castToNumberNativeEnum(StringNativeEnum.OK));
  assertThrowsClassCastException(() -> castToNumberNativeEnum(PlainJsEnum.ONE));
  assertThrowsClassCastException(() -> castToNumberNativeEnum((Integer) 1));
  assertThrowsClassCastException(() -> castToNumberNativeEnum(OK_STRING));
}
/**
 * Forces a runtime cast of an arbitrary value to {@link NativeEnum}; exists so the
 * cast happens in a separate frame and its success/failure can be asserted by callers.
 */
private static NativeEnum castToNativeEnum(Object candidate) {
  NativeEnum castResult = (NativeEnum) candidate;
  return castResult;
}
/**
 * Forces a runtime cast of an arbitrary value to {@link StringNativeEnum}; callers assert
 * whether the cast succeeds or throws {@code ClassCastException}.
 */
private static StringNativeEnum castToStringNativeEnum(Object candidate) {
  StringNativeEnum castResult = (StringNativeEnum) candidate;
  return castResult;
}
/**
 * Forces a runtime cast of an arbitrary value to {@link NumberNativeEnum}; callers assert
 * whether the cast succeeds or throws {@code ClassCastException}.
 */
private static NumberNativeEnum castToNumberNativeEnum(Object candidate) {
  NumberNativeEnum castResult = (NumberNativeEnum) candidate;
  return castResult;
}
// JS passthrough overload for StringNativeEnum — same `passThrough` JS function as above;
// presumably an identity function in the companion native JS (TODO confirm).
@JsMethod(name = "passThrough")
@Wasm("nop") // Non-native JsMethod not supported in Wasm.
private static native Object asSeenFromJs(StringNativeEnum s);
// Non-native JsEnum with no custom value: each constant's underlying value is its
// ordinal. Constant order is load-bearing — getValue() delegates to ordinal().
@JsEnum
enum PlainJsEnum {
  // Renamed on the JS side; ZERO remains the Java-visible name.
  @JsProperty(name = "NAUGHT")
  ZERO,
  ONE,
  TWO,
  THREE,
  FOUR,
  FIVE,
  SIX,
  SEVEN,
  EIGHT,
  NINE,
  TEN;

  // Exposes the ordinal as the enum's logical value.
  public int getValue() {
    return ordinal();
  }
}
// A second ordinal-backed JsEnum, used to verify that distinct JsEnum types with equal
// underlying values do not compare equal and that cross-type compareTo throws.
@JsEnum
enum OtherPlainJsEnum {
  NONE,
  UNIT
}
/**
 * Verifies non-native (ordinal-backed) JsEnum semantics: unlike native JsEnums the value
 * is boxed when viewed as Object, so it is NOT identical to the raw double; equals/
 * hashCode/compareTo respect the JsEnum type; instanceof and casts treat it as
 * PlainJsEnum (not Enum, not Double).
 */
private static void testComparableJsEnum() {
  PlainJsEnum v = PlainJsEnum.ONE;
  switch (v) {
    case ZERO:
      fail();
      break;
    case ONE:
      break;
    default:
      fail();
      break;
  }
  // Switching on a null JsEnum reference must throw.
  assertThrowsNullPointerException(
      () -> {
        PlainJsEnum nullJsEnum = null;
        switch (nullJsEnum) {
        }
      });
  assertTrue(v == PlainJsEnum.ONE);
  assertTrue(v != PlainJsEnum.ZERO);
  // Not identical to the raw underlying double (contrast with native JsEnums).
  assertTrue((Object) v != ONE_DOUBLE);
  // Boxing preserves equality.
  Object o = PlainJsEnum.ONE;
  assertTrue(o == PlainJsEnum.ONE);
  // Object methods calls on a variable of JsEnum type.
  assertTrue(v.hashCode() == PlainJsEnum.ONE.hashCode());
  assertTrue(v.hashCode() != PlainJsEnum.ZERO.hashCode());
  assertTrue(v.toString().equals(String.valueOf(ONE_DOUBLE)));
  assertTrue(v.equals(PlainJsEnum.ONE));
  assertFalse(v.equals(ONE_DOUBLE));
  // Equal underlying values of different JsEnum types are not equal.
  assertFalse(PlainJsEnum.ZERO.equals(OtherPlainJsEnum.NONE));
  assertThrows(
      NullPointerException.class,
      () -> {
        PlainJsEnum nullJsEnum = null;
        nullJsEnum.equals(PlainJsEnum.ZERO);
      });
  // Object methods calls on a variable of Object type.
  assertTrue(o.hashCode() == PlainJsEnum.ONE.hashCode());
  assertTrue(o.hashCode() != PlainJsEnum.ZERO.hashCode());
  assertTrue(o.toString().equals(String.valueOf(ONE_DOUBLE)));
  assertTrue(o.equals(PlainJsEnum.ONE));
  assertFalse(o.equals(PlainJsEnum.TWO));
  assertTrue(o.equals(v));
  assertFalse(o.equals(ONE_DOUBLE));
  assertTrue(v.getValue() == 1);
  assertTrue(v.ordinal() == 1);
  // compareTo works within the type; cross-type or cross-representation throws CCE.
  assertTrue(PlainJsEnum.ONE.compareTo(v) == 0);
  assertTrue(PlainJsEnum.ZERO.compareTo(v) < 0);
  assertThrows(
      NullPointerException.class,
      () -> {
        PlainJsEnum nullJsEnum = null;
        nullJsEnum.compareTo(PlainJsEnum.ZERO);
      });
  assertThrowsClassCastException(
      () -> {
        Comparable comparable = PlainJsEnum.ONE;
        comparable.compareTo(OtherPlainJsEnum.UNIT);
      });
  assertThrowsClassCastException(
      () -> {
        Comparable comparable = PlainJsEnum.ONE;
        comparable.compareTo(ONE_DOUBLE);
      });
  assertThrowsClassCastException(
      () -> {
        Comparable comparable = (Comparable) ONE_DOUBLE;
        comparable.compareTo(PlainJsEnum.ONE);
      });
  assertThrowsClassCastException(
      () -> {
        Object unused = (Enum<PlainJsEnum> & Comparable<PlainJsEnum>) PlainJsEnum.ONE;
      },
      Enum.class);
  // Test that boxing of special method 'ordinal()' call is not broken by normalization.
  Integer i = v.ordinal();
  assertTrue(i.intValue() == 1);
  if (!TestUtils.isWasm()) {
    // JsEnums are still instance of Enum in Wasm.
    assertFalse(v instanceof Enum);
  }
  assertTrue(v instanceof PlainJsEnum);
  assertFalse((Object) v instanceof Double);
  assertTrue(v instanceof Comparable);
  assertTrue(v instanceof Serializable);
  assertFalse(new Object() instanceof PlainJsEnum);
  assertFalse((Object) ONE_DOUBLE instanceof PlainJsEnum);
  // Compatible casts succeed; Enum/Double casts throw CCE.
  PlainJsEnum pe = (PlainJsEnum) o;
  Comparable c = (Comparable) o;
  Serializable s = (Serializable) o;
  assertThrowsClassCastException(
      () -> {
        Object unused = (Enum) o;
      },
      Enum.class);
  assertThrowsClassCastException(
      () -> {
        Object unused = (Double) o;
      },
      Double.class);
  // Comparable test.
  SortedSet<Comparable> sortedSet = new TreeSet<>(Comparable::compareTo);
  sortedSet.add(PlainJsEnum.ONE);
  sortedSet.add(PlainJsEnum.ZERO);
  assertTrue(sortedSet.iterator().next() == PlainJsEnum.ZERO);
  assertTrue(sortedSet.iterator().next() instanceof PlainJsEnum);
}
/** Verifies that a non-native JsEnum is UNBOXED to its underlying double when passed to JS. */
@Wasm("nop") // Non-native JsMethod not supported in Wasm.
private static void testComparableJsEnumAsSeenFromJs() {
  assertTrue(asSeenFromJs(PlainJsEnum.ONE) == ONE_DOUBLE);
}
/**
 * Verifies that an intersection cast boxes or unboxes a JsEnum depending on the static
 * destination type: assigning to Comparable keeps the boxed PlainJsEnum, while assigning
 * to PlainJsEnum yields the unboxed underlying Double.
 */
@Wasm("nop") // TODO(b/182341814, b/295235576): DoNotAutobox not supported in Wasm. JsEnum class
// literals not yet supported in Wasm.
private static void testComparableJsEnumIntersectionCasts() {
  Object o = PlainJsEnum.ONE;
  // Intersection casts box/or unbox depending on the destination type.
  Comparable otherC = (PlainJsEnum & Comparable<PlainJsEnum>) o;
  assertUnderlyingTypeEquals(PlainJsEnum.class, otherC);
  PlainJsEnum otherPe = (PlainJsEnum & Comparable<PlainJsEnum>) o;
  assertUnderlyingTypeEquals(Double.class, otherPe);
}
@JsMethod(name = "passThrough")
@Wasm("nop") // Non-native JsMethod not supported in Wasm.
private static native Object asSeenFromJs(PlainJsEnum d);
public static PlainJsEnum defaultStaticJsEnum;
public static PlainJsEnum oneStaticJsEnum = PlainJsEnum.ONE;
private static void testJsEnumVariableInitialization() {
assertEquals(defaultStaticJsEnum, null);
assertEquals(oneStaticJsEnum, PlainJsEnum.ONE);
PlainJsEnum oneJsEnum = PlainJsEnum.ONE;
assertEquals(oneJsEnum, PlainJsEnum.ONE);
}
@JsEnum(hasCustomValue = true)
enum StringJsEnum {
HELLO("Hello"),
GOODBYE("Good Bye");
String value;
StringJsEnum(String value) {
this.value = value;
}
}
  // Exercises a custom-valued (String-backed) JsEnum: switch dispatch, identity
  // comparison, boxing, Object-method devirtualization, instanceof, and casts.
  private static void testStringJsEnum() {
    StringJsEnum v = StringJsEnum.HELLO;
    switch (v) {
      case GOODBYE:
        fail();
        break;
      case HELLO:
        break;
      default:
        fail();
        break;
    }
    // Switching on a null JsEnum must throw NPE, matching regular enum semantics.
    assertThrowsNullPointerException(
        () -> {
          StringJsEnum nullJsEnum = null;
          switch (nullJsEnum) {
          }
        });
    assertTrue(v == StringJsEnum.HELLO);
    assertTrue(v != StringJsEnum.GOODBYE);
    // A JsEnum value is not reference-identical to its raw underlying String.
    assertTrue((Object) v != HELLO_STRING);
    // Boxing preserves equality.
    Object o = StringJsEnum.HELLO;
    assertTrue(o == StringJsEnum.HELLO);
    // Object methods calls on a variable of JsEnum type.
    assertTrue(v.hashCode() == StringJsEnum.HELLO.hashCode());
    assertTrue(v.hashCode() != StringJsEnum.GOODBYE.hashCode());
    assertTrue(v.equals(StringJsEnum.HELLO));
    assertFalse(v.equals(HELLO_STRING));
    // Devirtualized equals() on a null receiver must still throw NPE.
    assertThrows(
        NullPointerException.class,
        () -> {
          StringJsEnum nullJsEnum = null;
          nullJsEnum.equals(StringJsEnum.HELLO);
        });
    // Object methods calls on a variable of Object type.
    assertTrue(o.hashCode() == StringJsEnum.HELLO.hashCode());
    assertTrue(o.hashCode() != StringJsEnum.GOODBYE.hashCode());
    assertTrue(o.equals(StringJsEnum.HELLO));
    assertFalse(o.equals(StringJsEnum.GOODBYE));
    assertTrue(o.equals(v));
    assertFalse(o.equals(HELLO_STRING));
    assertTrue(v.value.equals(HELLO_STRING));
    if (!TestUtils.isWasm()) {
      // JsEnums are still instance of Enum in Wasm.
      assertFalse(v instanceof Enum);
    }
    assertTrue(v instanceof StringJsEnum);
    assertFalse((Object) v instanceof String);
    if (!TestUtils.isWasm()) {
      // JsEnums are still instance of Enum in Wasm.
      assertFalse(v instanceof Comparable);
    }
    assertTrue(v instanceof Serializable);
    assertFalse((Object) v instanceof PlainJsEnum);
    assertFalse(new Object() instanceof StringJsEnum);
    assertFalse((Object) HELLO_STRING instanceof StringJsEnum);
    // Allowed casts: to the JsEnum type itself and to Serializable.
    StringJsEnum se = (StringJsEnum) o;
    Serializable s = (Serializable) o;
    // Disallowed casts: custom-valued JsEnums are neither Enum, Comparable, nor String.
    assertThrowsClassCastException(
        () -> {
          Object unused = (Enum) o;
        },
        Enum.class);
    assertThrowsClassCastException(
        () -> {
          Object unused = (Comparable) o;
        },
        Comparable.class);
    assertThrowsClassCastException(
        () -> {
          Object unused = (String) o;
        },
        String.class);
    assertThrowsClassCastException(
        () -> {
          Object unused = (StringJsEnum & Comparable<StringJsEnum>) o;
        },
        Comparable.class);
    if (!TestUtils.isWasm()) {
      // TODO(b/353352388): The value field is not used in toString in Wasm.
      assertTrue(v.toString().equals(HELLO_STRING));
      assertTrue(o.toString().equals(HELLO_STRING));
    }
  }
  // Verifies that a String-backed JsEnum crosses the JS boundary as its raw String value.
  @Wasm("nop") // Non-native JsMethod not supported in Wasm.
  private static void testStringJsEnumAsSeenFromJs() {
    assertTrue(asSeenFromJs(StringJsEnum.HELLO) == HELLO_STRING);
  }
  // JS-side identity pass-through, overloaded for the String-backed JsEnum.
  @JsMethod(name = "passThrough")
  @Wasm("nop") // Non-native JsMethod not supported in Wasm.
  private static native Object asSeenFromJs(StringJsEnum b);
  // Flipped by EnumWithClinit's static initializer; lets tests observe when clinit runs.
  private static boolean nonNativeClinitCalled = false;
  // JsEnum with a static initializer and an instance method, used to pin down exactly
  // which operations trigger class initialization.
  @JsEnum
  enum EnumWithClinit {
    A;
    static {
      nonNativeClinitCalled = true;
    }
    int getValue() {
      return ordinal();
    }
  }
  // Pins down which operations on a non-native JsEnum trigger its static initializer:
  // value access, casts, instanceof and ordinal() must NOT; devirtualized instance
  // methods must. The assertion order is load-bearing — do not reorder.
  @Wasm("nop") // In Wasm, there is no boxing logic and clinit is called for JsEnum value accesses.
  private static void testJsEnumClassInitialization() {
    assertFalse(nonNativeClinitCalled);
    // Access to an enum value does not trigger clinit.
    Object o = EnumWithClinit.A;
    assertFalse(nonNativeClinitCalled);
    // Cast and instanceof do not trigger clinit.
    if (o instanceof EnumWithClinit) {
      o = (EnumWithClinit) o;
    }
    assertFalse(nonNativeClinitCalled);
    // Access to ordinal() does not trigger clinit.
    int n = EnumWithClinit.A.ordinal();
    assertFalse(nonNativeClinitCalled);
    // Access to any devirtualized method triggers clinit.
    EnumWithClinit.A.getValue();
    assertTrue(nonNativeClinitCalled);
  }
  // Same clinit-trigger contract as above, for a native JsEnum (NativeEnumWithClinit
  // and nativeClinitCalled are declared elsewhere in this class).
  @Wasm("nop") // TODO(b/288145698): Support native JsEnum.
  private static void testNativeEnumClassInitialization() {
    assertFalse(nativeClinitCalled);
    // Access to an enum value does not trigger clinit.
    Object o = NativeEnumWithClinit.OK;
    assertFalse(nativeClinitCalled);
    // Cast does not trigger clinit.
    o = (NativeEnumWithClinit) o;
    assertFalse(nativeClinitCalled);
    // Access to value does not trigger clinit.
    String s = NativeEnumWithClinit.OK.value;
    assertFalse(nativeClinitCalled);
    // Access to any devirtualized method triggers clinit.
    NativeEnumWithClinit.OK.getValue();
    assertTrue(nativeClinitCalled);
  }
  // @DoNotAutobox parameters must receive the raw underlying value (the String here),
  // not a boxed JsEnum — for both plain and varargs parameters.
  @Wasm("nop") // TODO(b/182341814): DoNotAutobox not supported in Wasm.
  private static void testDoNotAutoboxJsEnum() {
    assertTrue(returnsObject(StringJsEnum.HELLO) == HELLO_STRING);
    assertTrue(returnsObject(0, StringJsEnum.HELLO) == HELLO_STRING);
  }
  // Identity helper with an unboxed-accepting Object parameter.
  private static Object returnsObject(@DoNotAutobox Object object) {
    return object;
  }
  // Varargs variant; returns the first vararg unchanged.
  private static Object returnsObject(int n, @DoNotAutobox Object... object) {
    return object[0];
  }
  // @UncheckedCast lets a raw underlying value be treated as its JsEnum type without a
  // runtime check. NOTE(review): method name has a typo ("Unckecked"); kept because
  // callers outside this view (the test runner) reference it by name.
  @Wasm("nop") // Unchecked cast not supported in Wasm.
  private static void testUnckeckedCastJsEnum() {
    StringJsEnum s = uncheckedCast(HELLO_STRING);
    assertTrue(s == StringJsEnum.HELLO);
  }
  // Reinterprets the argument as T with no runtime cast check and no boxing.
  @UncheckedCast
  private static <T> T uncheckedCast(@DoNotAutobox Object object) {
    return (T) object;
  }
  // JsEnum values (and null) must survive round trips through returns, parameters and
  // generic type variables unchanged.
  private static void testReturnsAndParameters() {
    assertTrue(PlainJsEnum.ONE == returnsJsEnum());
    assertTrue(PlainJsEnum.ONE == returnsJsEnum(PlainJsEnum.ONE));
    assertTrue(null == returnsNullJsEnum());
    assertTrue(null == returnsJsEnum(null));
    Main.<PlainJsEnum>testGenericAssertNull(null);
  }
  // Constant-returning helper.
  private static PlainJsEnum returnsJsEnum() {
    return PlainJsEnum.ONE;
  }
  // Identity helper.
  private static PlainJsEnum returnsJsEnum(PlainJsEnum value) {
    return value;
  }
  // Null-returning helper; null must not be coerced to a default enum value.
  private static PlainJsEnum returnsNullJsEnum() {
    return null;
  }
  // Null must remain null when passed through a type-variable-typed parameter.
  private static <T> void testGenericAssertNull(T obj) {
    assertTrue(obj == null);
  }
  // == and != between unboxed, boxed and generically-passed JsEnum values must all
  // agree; a boxed JsEnum must never compare equal to a java.lang.Double.
  private static void testAutoBoxing_relationalOperations() {
    PlainJsEnum one = PlainJsEnum.ONE;
    Object boxedOne = PlainJsEnum.ONE;
    assertTrue(one == boxingPassthrough(one));
    assertTrue(boxedOne == boxingPassthrough(one));
    assertTrue(boxingPassthrough(one) == one);
    assertTrue(boxingPassthrough(one) == boxedOne);
    assertFalse(one != boxedOne);
    assertFalse(boxedOne != one);
    assertFalse(one != boxingPassthrough(one));
    assertFalse(boxedOne != boxingPassthrough(one));
    assertFalse(boxingPassthrough(one) != one);
    assertFalse(boxingPassthrough(one) != boxedOne);
    // Comparison with a double object which is unboxed. Many of the comparisons, like
    // `1.0 == PlainJsEnum.ONE` are rejected by the compiler due to type incompatibility.
    assertFalse((Object) Double.valueOf(1.0) == PlainJsEnum.ONE);
    assertFalse(Double.valueOf(1.0) == boxedOne);
    // Casting a boxed JsEnum to Double must fail even though the underlying value is a
    // JS number.
    assertThrowsClassCastException(
        () -> {
          boolean unused = 1.0 == (Double) boxedOne;
        });
  }
  // Generic identity; forces a box/unbox cycle around the type variable.
  private static <T> T boxingPassthrough(T t) {
    return t;
  }
  // equals() and compareTo() on JsEnums must behave like their java.lang.Enum
  // counterparts, including across boxing and across distinct JsEnum types.
  private static void testAutoBoxing_specialMethods() {
    assertTrue(PlainJsEnum.ONE.equals(PlainJsEnum.ONE));
    assertTrue(PlainJsEnum.ONE.compareTo(PlainJsEnum.ONE) == 0);
    assertTrue(PlainJsEnum.ONE.compareTo(PlainJsEnum.ZERO) > 0);
    assertTrue(PlainJsEnum.TWO.compareTo(PlainJsEnum.TEN) < 0);
    PlainJsEnum jsEnum = PlainJsEnum.ONE;
    PlainJsEnum nullJsEnum = null;
    Object objectJsEnum = PlainJsEnum.ONE;
    StringJsEnum stringJsEnum = StringJsEnum.HELLO;
    PlainJsEnum nullStringJsEnum = null;
    Object objectStringJsEnum = StringJsEnum.HELLO;
    assertFalse(jsEnum.equals(PlainJsEnum.TWO));
    assertTrue(jsEnum.equals(objectJsEnum));
    assertFalse(jsEnum.equals(nullJsEnum));
    assertFalse(jsEnum.equals(null));
    assertFalse(stringJsEnum.equals(StringJsEnum.GOODBYE));
    assertTrue(stringJsEnum.equals(objectStringJsEnum));
    assertFalse(stringJsEnum.equals(nullJsEnum));
    assertFalse(stringJsEnum.equals(null));
    // Values of different JsEnum types are never equal, even if underlying values match.
    assertFalse(jsEnum.equals(stringJsEnum));
  }
  // Intersection casts on an already-unboxed JsEnum: assigning to the JsEnum type
  // unboxes, while using the intersection-cast expression directly keeps it boxed.
  @Wasm("nop") // TODO(b/182341814, b/295235576): DoNotAutobox not supported in Wasm. JsEnum class
  // literals not yet supported in Wasm.
  private static void testAutoBoxing_intersectionCasts() {
    Comparable c = (PlainJsEnum & Comparable<PlainJsEnum>) PlainJsEnum.ONE;
    assertTrue(c.compareTo(PlainJsEnum.ZERO) > 0);
    PlainJsEnum e = (PlainJsEnum & Comparable<PlainJsEnum>) PlainJsEnum.ONE;
    // e correcly holds an unboxed value.
    assertUnderlyingTypeEquals(Double.class, e);
    assertTrue(PlainJsEnum.ONE == (PlainJsEnum & Comparable<PlainJsEnum>) PlainJsEnum.ONE);
    // Intersection cast with a JsEnum does not unbox like the simple cast.
    assertUnderlyingTypeEquals(
        PlainJsEnum.class, (PlainJsEnum & Comparable<PlainJsEnum>) PlainJsEnum.ONE);
  }
  // Verifies where boxing happens when JsEnums flow through inferred generic types:
  // type-variable-typed fields/returns hold BOXED values; assignment back to the
  // declared JsEnum type (or use as a devirtualized-call qualifier) UNBOXES.
  @Wasm("nop") // TODO(b/182341814, b/295235576): DoNotAutobox not supported in Wasm. JsEnum class
  // literals not yet supported in Wasm.
  private static void testAutoBoxing_typeInference() {
    assertUnderlyingTypeEquals(Double.class, PlainJsEnum.ONE);
    assertUnderlyingTypeEquals(PlainJsEnum.class, boxingIdentity(PlainJsEnum.ONE));
    // Make sure the enum is boxed even when assigned to a field that is inferred to be JsEnum.
    TemplatedField<PlainJsEnum> templatedField = new TemplatedField<PlainJsEnum>(PlainJsEnum.ONE);
    PlainJsEnum unboxed = templatedField.getValue();
    assertUnderlyingTypeEquals(Double.class, unboxed);
    // Boxing through specialized method parameter assignment.
    assertUnderlyingTypeEquals(PlainJsEnum.class, boxingIdentity(unboxed));
    // Unboxing as a qualifier to ordinal.
    assertUnderlyingTypeEquals(Double.class, templatedField.getValue().ordinal());
    // Boxing through specialized method parameter assignment.
    assertUnderlyingTypeEquals(PlainJsEnum.class, boxingIdentity(templatedField.getValue()));
    // Checks what is actually returned by getValue().
    assertUnderlyingTypeEquals(PlainJsEnum.class, ((TemplatedField) templatedField).getValue());
    unboxed = templatedField.value;
    assertUnderlyingTypeEquals(Double.class, unboxed);
    templatedField.value = PlainJsEnum.ONE;
    // Boxing through specialized method parameter assignment.
    assertUnderlyingTypeEquals(PlainJsEnum.class, boxingIdentity(templatedField.value));
    // Checks what is actually stored in value.
    assertUnderlyingTypeEquals(PlainJsEnum.class, ((TemplatedField) templatedField).value);
    // Unboxing as a qualifier to ordinal.
    assertUnderlyingTypeEquals(Double.class, templatedField.value.ordinal());
    // Boxing/unboxing in varargs.
    List<?> list = Arrays.asList(PlainJsEnum.ONE);
    assertUnderlyingTypeEquals(PlainJsEnum.class, list.get(0));
    unboxed = (PlainJsEnum) list.get(0);
    assertUnderlyingTypeEquals(Double.class, unboxed);
    // TODO(b/118615488): Rewrite the following checks when JsEnum arrays are allowed.
    // In Java the varargs array will be of the inferred argument type. Since non native JsEnum
    // arrays are not allowed, the created array is of the declared type.
    Object[] arr = varargsToComparableArray(PlainJsEnum.ONE);
    assertUnderlyingTypeEquals(Comparable[].class, arr);
    assertUnderlyingTypeEquals(PlainJsEnum.class, arr[0]);
    arr = varargsToObjectArray(PlainJsEnum.ONE);
    assertUnderlyingTypeEquals(Object[].class, arr);
    assertUnderlyingTypeEquals(PlainJsEnum.class, arr[0]);
  }
  // Minimal generic holder used to observe boxing at type-variable boundaries.
  private static class TemplatedField<T> {
    T value;
    TemplatedField(T value) {
      this.value = value;
    }
    T getValue() {
      return this.value;
    }
  }
  // Generic identity returning Object; its parameter assignment forces boxing.
  private static <T> Object boxingIdentity(T o) {
    return o;
  }
  // Varargs helper whose erased array type is Comparable[].
  private static <T extends Comparable> Object[] varargsToComparableArray(T... elements) {
    return elements;
  }
  // Varargs helper whose erased array type is Object[].
  private static <T> Object[] varargsToObjectArray(T... elements) {
    return elements;
  }
  // JsEnums flowing through lambdas typed by generic functional interfaces: casts,
  // specialized parameters, switch on the lambda parameter, and Supplier returns.
  private static void testAutoBoxing_parameterizedLambda() {
    Function<Object, Double> ordinalWithCast = e -> (double) ((PlainJsEnum) e).ordinal();
    assertTrue(1 == ordinalWithCast.apply(PlainJsEnum.ONE));
    Function<PlainJsEnum, Double> ordinal = e -> (double) e.ordinal();
    assertTrue(1 == ordinal.apply(PlainJsEnum.ONE));
    Function<? super PlainJsEnum, String> function =
        e -> {
          switch (e) {
            case ONE:
              return "ONE";
            default:
              return "None";
          }
        };
    assertEquals("ONE", function.apply(PlainJsEnum.ONE));
    Supplier<PlainJsEnum> supplier = () -> PlainJsEnum.ONE;
    assertEquals(PlainJsEnum.ONE, supplier.get());
  }
  // Generic container; its specialized subclasses exercise bridge-method generation.
  private static class Container<T> {
    T field;
    T get() {
      return field;
    }
    void set(T t) {
      field = t;
    }
  }
private static class PlainJsEnumContainer extends Container<PlainJsEnum> {
PlainJsEnum get() {
return super.get();
}
void set(PlainJsEnum plainJsEnum) {
super.set(plainJsEnum);
}
}
  // Same shape as Container but exported as a @JsType, so accessors cross the JS
  // interop boundary instead of being plain Java calls.
  @JsType
  private static class JsTypeContainer<T> {
    private T field;
    public T get() {
      return field;
    }
    public void set(T t) {
      field = t;
    }
  }
private static class JsTypePlainJsEnumContainer extends JsTypeContainer<PlainJsEnum> {
public PlainJsEnum get() {
return super.get();
}
public void set(PlainJsEnum plainJsEnum) {
super.set(plainJsEnum);
}
}
  // Values set through either the specialized or the generic view of a container must
  // be observable through both views.
  private static void testSpecializedSuperType() {
    PlainJsEnum five = PlainJsEnum.FIVE;
    PlainJsEnumContainer pc = new PlainJsEnumContainer();
    Container<PlainJsEnum> c = pc;
    pc.set(five);
    assertTrue(five == pc.get());
    assertTrue(five == ((Container<?>) c).get());
    PlainJsEnum six = PlainJsEnum.SIX;
    c.set(six);
    assertTrue(six == pc.get());
    assertTrue(six == ((Container<?>) c).get());
    // Underlying-type checks are disabled here (Wasm-incompatible); see
    // testSpecializedSuperTypeUnderlyingType for the enabled versions.
    // assertUnderlyingTypeEquals(PlainJsEnum.class, ((Container<?>) c).get());
    // assertUnderlyingTypeEquals(Double.class, pc.get());
    JsTypePlainJsEnumContainer jpc = new JsTypePlainJsEnumContainer();
    JsTypeContainer<PlainJsEnum> jc = jpc;
    jpc.set(five);
    assertTrue(five == jpc.get());
    assertTrue(five == ((JsTypeContainer<?>) jc).get());
    jc.set(six);
    assertTrue(six == jpc.get());
    assertTrue(six == ((JsTypeContainer<?>) jc).get());
    // assertUnderlyingTypeEquals(PlainJsEnum.class, ((JsTypeContainer<?>) jc).get());
    // assertUnderlyingTypeEquals(Double.class, jpc.get());
  }
  // The generic view returns boxed values; the specialized view returns unboxed ones.
  @Wasm("nop") // TODO(b/182341814, b/295235576): DoNotAutobox not supported in Wasm. JsEnum class
  // literals not yet supported in Wasm.
  private static void testSpecializedSuperTypeUnderlyingType() {
    PlainJsEnum five = PlainJsEnum.FIVE;
    PlainJsEnumContainer pc = new PlainJsEnumContainer();
    Container<PlainJsEnum> c = pc;
    pc.set(five);
    assertUnderlyingTypeEquals(PlainJsEnum.class, ((Container<?>) c).get());
    assertUnderlyingTypeEquals(Double.class, pc.get());
    JsTypePlainJsEnumContainer jpc = new JsTypePlainJsEnumContainer();
    JsTypeContainer<PlainJsEnum> jc = jpc;
    jpc.set(five);
    assertUnderlyingTypeEquals(PlainJsEnum.class, ((JsTypeContainer<?>) jc).get());
    assertUnderlyingTypeEquals(Double.class, jpc.get());
  }
  @JsMethod
  @Wasm("nop") // Non-native js methods not supported in Wasm.
  // Pass through an enum value as if it were coming from and going to JavaScript.
  // Asserts that only raw underlying values (String/Double/Boolean) — never boxed
  // JsEnums — reach JavaScript.
  private static Object passThrough(Object o) {
    // Supported closure enums can only have number, boolean or string as their underlying type.
    // Make sure that boxed enums are not passing though here.
    assertTrue(o instanceof String || o instanceof Double || o instanceof Boolean);
    return o;
  }
  // Passing a JsEnum to an Object parameter (checkNotNull) must box it; verifies both
  // the non-null and null paths.
  private static void testBoxingPartialInlining() {
    // TODO(b/315214896) Check the size difference to see if cases such as these take advantage of
    // partial inlining in Wasm to turn this into a simple null check, avoiding boxing.
    PlainJsEnum nonnullJsEnum = PlainJsEnum.ONE;
    checkNotNull(nonnullJsEnum);
    // Use the local so it doesn't get removed.
    assertTrue(nonnullJsEnum == PlainJsEnum.ONE);
    PlainJsEnum nullJsEnum = null;
    assertThrowsNullPointerException(() -> checkNotNull(nullJsEnum));
    assertTrue(nullJsEnum == null);
  }
private static void checkNotNull(Object obj) {
if (obj == null) {
throw new NullPointerException();
}
}
  // Arrays of non-native JsEnums: literals, element assignment, null elements,
  // default-initialized slots (null, not a zero value), Object[] storage, collection
  // round trips, and nested arrays.
  private static void testNonNativeJsEnumArrays() {
    PlainJsEnum[] arr = new PlainJsEnum[] {PlainJsEnum.THREE, PlainJsEnum.TWO};
    assertTrue(arr.length == 2);
    assertTrue(arr[0] == PlainJsEnum.THREE);
    assertTrue(arr[1] == PlainJsEnum.TWO);
    PlainJsEnum[] arr2 = new PlainJsEnum[2];
    assertTrue(arr2.length == 2);
    arr2[0] = PlainJsEnum.THREE;
    arr2[1] = PlainJsEnum.TWO;
    assertTrue(arr2[0] == PlainJsEnum.THREE);
    assertTrue(arr2[1] == PlainJsEnum.TWO);
    PlainJsEnum[] arrayWithNull = new PlainJsEnum[] {null};
    assertTrue(arrayWithNull[0] == null);
    PlainJsEnum[] arrayWithDefaults = new PlainJsEnum[1];
    assertTrue(arrayWithDefaults[0] == null);
    Object[] objArray = new Object[] {PlainJsEnum.ONE};
    assertTrue(objArray[0] == PlainJsEnum.ONE);
    List<PlainJsEnum> list = new ArrayList<PlainJsEnum>();
    list.add(PlainJsEnum.ONE);
    assertTrue(list.toArray()[0] == PlainJsEnum.ONE);
    PlainJsEnum[][] nestedArr = new PlainJsEnum[][] {{PlainJsEnum.THREE}};
    assertTrue(nestedArr.length == 1);
    assertTrue(nestedArr[0].length == 1);
    assertTrue(nestedArr[0][0] == PlainJsEnum.THREE);
    nestedArr[0] = new PlainJsEnum[] {PlainJsEnum.TWO};
    assertTrue(nestedArr[0][0] == PlainJsEnum.TWO);
  }
  // Same array coverage for the String-backed JsEnum (no nested/collection cases).
  private static void testNonNativeStringJsEnumArrays() {
    StringJsEnum[] arr = new StringJsEnum[] {StringJsEnum.HELLO, StringJsEnum.GOODBYE};
    assertTrue(arr.length == 2);
    assertTrue(arr[0] == StringJsEnum.HELLO);
    assertTrue(arr[1] == StringJsEnum.GOODBYE);
    StringJsEnum[] arr2 = new StringJsEnum[2];
    assertTrue(arr2.length == 2);
    arr2[0] = StringJsEnum.HELLO;
    arr2[1] = StringJsEnum.GOODBYE;
    assertTrue(arr2[0] == StringJsEnum.HELLO);
    assertTrue(arr2[1] == StringJsEnum.GOODBYE);
    StringJsEnum[] arrayWithNull = new StringJsEnum[] {null};
    assertTrue(arrayWithNull[0] == null);
    StringJsEnum[] arrayWithDefaults = new StringJsEnum[1];
    assertTrue(arrayWithDefaults[0] == null);
  }
  // Array elements hold the raw underlying value (Double/String), not boxed JsEnums.
  @Wasm("nop") // JsEnum boxing not implemented in Wasm.
  private static void testNonNativeJsEnumArrayBoxing() {
    // JsEnums are stored as unboxed in an array.
    PlainJsEnum[] arr = new PlainJsEnum[] {PlainJsEnum.THREE};
    assertUnderlyingTypeEquals(Double.class, arr[0]);
    StringJsEnum[] arr2 = new StringJsEnum[] {StringJsEnum.HELLO};
    assertUnderlyingTypeEquals(String.class, arr2[0]);
  }
  // Array coverage for a native JsEnum (NativeEnum is declared elsewhere in this
  // class): literals, assignment, null/default elements, nested arrays.
  @Wasm("nop") // TODO(b/288145698): Support native JsEnum.
  private static void testNativeJsEnumArray() {
    NativeEnum[] arr = new NativeEnum[] {NativeEnum.ACCEPT, NativeEnum.CANCEL};
    assertTrue(arr.length == 2);
    assertTrue(arr[0] == NativeEnum.ACCEPT);
    assertTrue(arr[1] == NativeEnum.CANCEL);
    NativeEnum[] arr2 = new NativeEnum[2];
    assertTrue(arr2.length == 2);
    arr2[0] = NativeEnum.ACCEPT;
    arr2[1] = NativeEnum.CANCEL;
    assertTrue(arr2[0] == NativeEnum.ACCEPT);
    assertTrue(arr2[1] == NativeEnum.CANCEL);
    NativeEnum[] arrayWithNull = new NativeEnum[] {null};
    assertTrue(arrayWithNull[0] == null);
    NativeEnum[] arrayWithDefaults = new NativeEnum[1];
    assertTrue(arrayWithDefaults[0] == null);
    NativeEnum[][] nestedArr = new NativeEnum[][] {{NativeEnum.ACCEPT}};
    assertTrue(nestedArr.length == 1);
    assertTrue(nestedArr[0].length == 1);
    assertTrue(nestedArr[0][0] == NativeEnum.ACCEPT);
    nestedArr[0] = new NativeEnum[] {NativeEnum.CANCEL};
    assertTrue(nestedArr[0][0] == NativeEnum.CANCEL);
  }
  // JsEnums as varargs arguments: through a type variable, a declared JsEnum varargs
  // parameter, and an inherited generic varargs method (static and virtual dispatch).
  private static void testJsEnumVarargs() {
    checkTVarargs(PlainJsEnum.ONE);
    checkJsEnumVarargs(PlainJsEnum.ONE);
    DerivedWithoutJsEnumVarargs d = new DerivedWithoutJsEnumVarargs();
    d.checkTVarargs(PlainJsEnum.ONE);
    BaseWithTVarargs b = new DerivedWithoutJsEnumVarargs();
    b.checkTVarargs(PlainJsEnum.ONE);
  }
  // Generic varargs receiver; first element must arrive intact.
  private static <T> void checkTVarargs(T... t) {
    assertTrue(t[0] == PlainJsEnum.ONE);
  }
  // JsEnum-typed varargs receiver; first element must arrive intact.
  private static void checkJsEnumVarargs(PlainJsEnum... t) {
    assertTrue(t[0] == PlainJsEnum.ONE);
  }
  // Base class providing a generic varargs method for specialization tests.
  private static class BaseWithTVarargs<T> {
    public void checkTVarargs(T... t) {
      assertTrue(t[0] == PlainJsEnum.ONE);
    }
  }
  // Specializes the base at PlainJsEnum without redeclaring the varargs method, so
  // calls go through the inherited generic signature.
  private static class DerivedWithoutJsEnumVarargs extends BaseWithTVarargs<PlainJsEnum> {}
}
|
googleapis/google-cloud-java | 36,958 | java-chat/proto-google-cloud-chat-v1/src/main/java/com/google/chat/v1/ListCustomEmojisResponse.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/chat/v1/reaction.proto
// Protobuf Java Version: 3.25.8
package com.google.chat.v1;
/**
*
*
* <pre>
* A response to list custom emojis.
* </pre>
*
* Protobuf type {@code google.chat.v1.ListCustomEmojisResponse}
*/
public final class ListCustomEmojisResponse extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.chat.v1.ListCustomEmojisResponse)
ListCustomEmojisResponseOrBuilder {
private static final long serialVersionUID = 0L;
// Use ListCustomEmojisResponse.newBuilder() to construct.
private ListCustomEmojisResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
  // Default instance constructor: empty repeated field, empty page token.
  private ListCustomEmojisResponse() {
    customEmojis_ = java.util.Collections.emptyList();
    nextPageToken_ = "";
  }
  @java.lang.Override
  @SuppressWarnings({"unused"})
  protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
    return new ListCustomEmojisResponse();
  }
  // Descriptor accessors wired to the generated ReactionProto registry.
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.chat.v1.ReactionProto
        .internal_static_google_chat_v1_ListCustomEmojisResponse_descriptor;
  }
  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.chat.v1.ReactionProto
        .internal_static_google_chat_v1_ListCustomEmojisResponse_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.chat.v1.ListCustomEmojisResponse.class,
            com.google.chat.v1.ListCustomEmojisResponse.Builder.class);
  }
  public static final int CUSTOM_EMOJIS_FIELD_NUMBER = 1;
  @SuppressWarnings("serial")
  private java.util.List<com.google.chat.v1.CustomEmoji> customEmojis_;
  /**
   *
   *
   * <pre>
   * Unordered list. List of custom emojis.
   * </pre>
   *
   * <code>
   * repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
   * </code>
   */
  @java.lang.Override
  public java.util.List<com.google.chat.v1.CustomEmoji> getCustomEmojisList() {
    return customEmojis_;
  }
  /**
   *
   *
   * <pre>
   * Unordered list. List of custom emojis.
   * </pre>
   *
   * <code>
   * repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
   * </code>
   */
  @java.lang.Override
  public java.util.List<? extends com.google.chat.v1.CustomEmojiOrBuilder>
      getCustomEmojisOrBuilderList() {
    return customEmojis_;
  }
  /**
   *
   *
   * <pre>
   * Unordered list. List of custom emojis.
   * </pre>
   *
   * <code>
   * repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
   * </code>
   */
  @java.lang.Override
  public int getCustomEmojisCount() {
    return customEmojis_.size();
  }
  /**
   *
   *
   * <pre>
   * Unordered list. List of custom emojis.
   * </pre>
   *
   * <code>
   * repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
   * </code>
   */
  @java.lang.Override
  public com.google.chat.v1.CustomEmoji getCustomEmojis(int index) {
    return customEmojis_.get(index);
  }
  /**
   *
   *
   * <pre>
   * Unordered list. List of custom emojis.
   * </pre>
   *
   * <code>
   * repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
   * </code>
   */
  @java.lang.Override
  public com.google.chat.v1.CustomEmojiOrBuilder getCustomEmojisOrBuilder(int index) {
    return customEmojis_.get(index);
  }
  public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2;
  // Holds either a String or a ByteString; lazily converted and cached as String on
  // first getNextPageToken() call (standard protobuf lazy-UTF-8 pattern).
  @SuppressWarnings("serial")
  private volatile java.lang.Object nextPageToken_ = "";
  /**
   *
   *
   * <pre>
   * A token that you can send as `pageToken` to retrieve the next page of
   * results. If empty, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The nextPageToken.
   */
  @java.lang.Override
  public java.lang.String getNextPageToken() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      return (java.lang.String) ref;
    } else {
      com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
      java.lang.String s = bs.toStringUtf8();
      // Cache the decoded String so subsequent reads skip the UTF-8 conversion.
      nextPageToken_ = s;
      return s;
    }
  }
  /**
   *
   *
   * <pre>
   * A token that you can send as `pageToken` to retrieve the next page of
   * results. If empty, there are no subsequent pages.
   * </pre>
   *
   * <code>string next_page_token = 2;</code>
   *
   * @return The bytes for nextPageToken.
   */
  @java.lang.Override
  public com.google.protobuf.ByteString getNextPageTokenBytes() {
    java.lang.Object ref = nextPageToken_;
    if (ref instanceof java.lang.String) {
      com.google.protobuf.ByteString b =
          com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
      // Cache the encoded ByteString form.
      nextPageToken_ = b;
      return b;
    } else {
      return (com.google.protobuf.ByteString) ref;
    }
  }
  // -1 = not computed, 0 = not initialized, 1 = initialized (memoized).
  private byte memoizedIsInitialized = -1;
  @java.lang.Override
  public final boolean isInitialized() {
    byte isInitialized = memoizedIsInitialized;
    if (isInitialized == 1) return true;
    if (isInitialized == 0) return false;
    memoizedIsInitialized = 1;
    return true;
  }
  // Wire serialization: field 1 repeated messages, field 2 string if non-empty,
  // followed by any unknown fields retained from parsing.
  @java.lang.Override
  public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
    for (int i = 0; i < customEmojis_.size(); i++) {
      output.writeMessage(1, customEmojis_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_);
    }
    getUnknownFields().writeTo(output);
  }
  // Byte-size computation mirroring writeTo; memoized in memoizedSize.
  @java.lang.Override
  public int getSerializedSize() {
    int size = memoizedSize;
    if (size != -1) return size;
    size = 0;
    for (int i = 0; i < customEmojis_.size(); i++) {
      size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, customEmojis_.get(i));
    }
    if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) {
      size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_);
    }
    size += getUnknownFields().getSerializedSize();
    memoizedSize = size;
    return size;
  }
  // Field-wise equality including unknown fields.
  @java.lang.Override
  public boolean equals(final java.lang.Object obj) {
    if (obj == this) {
      return true;
    }
    if (!(obj instanceof com.google.chat.v1.ListCustomEmojisResponse)) {
      return super.equals(obj);
    }
    com.google.chat.v1.ListCustomEmojisResponse other =
        (com.google.chat.v1.ListCustomEmojisResponse) obj;
    if (!getCustomEmojisList().equals(other.getCustomEmojisList())) return false;
    if (!getNextPageToken().equals(other.getNextPageToken())) return false;
    if (!getUnknownFields().equals(other.getUnknownFields())) return false;
    return true;
  }
  // Standard generated hash: seeded with the descriptor hash, folding in each set
  // field tagged by its field number; memoized.
  @java.lang.Override
  public int hashCode() {
    if (memoizedHashCode != 0) {
      return memoizedHashCode;
    }
    int hash = 41;
    hash = (19 * hash) + getDescriptor().hashCode();
    if (getCustomEmojisCount() > 0) {
      hash = (37 * hash) + CUSTOM_EMOJIS_FIELD_NUMBER;
      hash = (53 * hash) + getCustomEmojisList().hashCode();
    }
    hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER;
    hash = (53 * hash) + getNextPageToken().hashCode();
    hash = (29 * hash) + getUnknownFields().hashCode();
    memoizedHashCode = hash;
    return hash;
  }
  // Standard generated parseFrom overloads, each delegating to PARSER or the
  // GeneratedMessageV3 IO helpers; variants accept an ExtensionRegistryLite.
  public static com.google.chat.v1.ListCustomEmojisResponse parseFrom(java.nio.ByteBuffer data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.chat.v1.ListCustomEmojisResponse parseFrom(
      java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.chat.v1.ListCustomEmojisResponse parseFrom(
      com.google.protobuf.ByteString data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.chat.v1.ListCustomEmojisResponse parseFrom(
      com.google.protobuf.ByteString data,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.chat.v1.ListCustomEmojisResponse parseFrom(byte[] data)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data);
  }
  public static com.google.chat.v1.ListCustomEmojisResponse parseFrom(
      byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws com.google.protobuf.InvalidProtocolBufferException {
    return PARSER.parseFrom(data, extensionRegistry);
  }
  public static com.google.chat.v1.ListCustomEmojisResponse parseFrom(java.io.InputStream input)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.chat.v1.ListCustomEmojisResponse parseFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Delimited variants read a varint length prefix before the message bytes.
  public static com.google.chat.v1.ListCustomEmojisResponse parseDelimitedFrom(
      java.io.InputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
  }
  public static com.google.chat.v1.ListCustomEmojisResponse parseDelimitedFrom(
      java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
        PARSER, input, extensionRegistry);
  }
  public static com.google.chat.v1.ListCustomEmojisResponse parseFrom(
      com.google.protobuf.CodedInputStream input) throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
  }
  public static com.google.chat.v1.ListCustomEmojisResponse parseFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
        PARSER, input, extensionRegistry);
  }
  // Builder factories: fresh builder, builder seeded from a prototype, and toBuilder
  // (which avoids copying when called on the default instance).
  @java.lang.Override
  public Builder newBuilderForType() {
    return newBuilder();
  }
  public static Builder newBuilder() {
    return DEFAULT_INSTANCE.toBuilder();
  }
  public static Builder newBuilder(com.google.chat.v1.ListCustomEmojisResponse prototype) {
    return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
  }
  @java.lang.Override
  public Builder toBuilder() {
    return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
  }
  @java.lang.Override
  protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    Builder builder = new Builder(parent);
    return builder;
  }
/**
*
*
* <pre>
* A response to list custom emojis.
* </pre>
*
* Protobuf type {@code google.chat.v1.ListCustomEmojisResponse}
*/
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
implements
// @@protoc_insertion_point(builder_implements:google.chat.v1.ListCustomEmojisResponse)
com.google.chat.v1.ListCustomEmojisResponseOrBuilder {
    // Builder-side descriptor accessors (same registry entries as the message).
    public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
      return com.google.chat.v1.ReactionProto
          .internal_static_google_chat_v1_ListCustomEmojisResponse_descriptor;
    }
    @java.lang.Override
    protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
        internalGetFieldAccessorTable() {
      return com.google.chat.v1.ReactionProto
          .internal_static_google_chat_v1_ListCustomEmojisResponse_fieldAccessorTable
          .ensureFieldAccessorsInitialized(
              com.google.chat.v1.ListCustomEmojisResponse.class,
              com.google.chat.v1.ListCustomEmojisResponse.Builder.class);
    }
    // Construct using com.google.chat.v1.ListCustomEmojisResponse.newBuilder()
    private Builder() {}
    private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
      super(parent);
    }
    // Resets both fields and the has-bits; the repeated field is reset either by
    // dropping the list or by clearing its nested builder, depending on which is live.
    @java.lang.Override
    public Builder clear() {
      super.clear();
      bitField0_ = 0;
      if (customEmojisBuilder_ == null) {
        customEmojis_ = java.util.Collections.emptyList();
      } else {
        customEmojis_ = null;
        customEmojisBuilder_.clear();
      }
      bitField0_ = (bitField0_ & ~0x00000001);
      nextPageToken_ = "";
      return this;
    }
    @java.lang.Override
    public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
      return com.google.chat.v1.ReactionProto
          .internal_static_google_chat_v1_ListCustomEmojisResponse_descriptor;
    }
    @java.lang.Override
    public com.google.chat.v1.ListCustomEmojisResponse getDefaultInstanceForType() {
      return com.google.chat.v1.ListCustomEmojisResponse.getDefaultInstance();
    }
    // build() enforces isInitialized(); buildPartial() does not.
    @java.lang.Override
    public com.google.chat.v1.ListCustomEmojisResponse build() {
      com.google.chat.v1.ListCustomEmojisResponse result = buildPartial();
      if (!result.isInitialized()) {
        throw newUninitializedMessageException(result);
      }
      return result;
    }
    @java.lang.Override
    public com.google.chat.v1.ListCustomEmojisResponse buildPartial() {
      com.google.chat.v1.ListCustomEmojisResponse result =
          new com.google.chat.v1.ListCustomEmojisResponse(this);
      buildPartialRepeatedFields(result);
      if (bitField0_ != 0) {
        buildPartial0(result);
      }
      onBuilt();
      return result;
    }
    // Transfers the repeated field: freezes the plain list (bit 0x1 tracks whether the
    // builder owns a mutable copy) or builds from the nested builder.
    private void buildPartialRepeatedFields(com.google.chat.v1.ListCustomEmojisResponse result) {
      if (customEmojisBuilder_ == null) {
        if (((bitField0_ & 0x00000001) != 0)) {
          customEmojis_ = java.util.Collections.unmodifiableList(customEmojis_);
          bitField0_ = (bitField0_ & ~0x00000001);
        }
        result.customEmojis_ = customEmojis_;
      } else {
        result.customEmojis_ = customEmojisBuilder_.build();
      }
    }
    // Transfers singular fields guarded by their has-bits (bit 0x2 = nextPageToken).
    private void buildPartial0(com.google.chat.v1.ListCustomEmojisResponse result) {
      int from_bitField0_ = bitField0_;
      if (((from_bitField0_ & 0x00000002) != 0)) {
        result.nextPageToken_ = nextPageToken_;
      }
    }
    // Reflection-based mutators delegate to GeneratedMessageV3.Builder.
    @java.lang.Override
    public Builder clone() {
      return super.clone();
    }
    @java.lang.Override
    public Builder setField(
        com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
      return super.setField(field, value);
    }
    @java.lang.Override
    public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
      return super.clearField(field);
    }
@java.lang.Override
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
return super.clearOneof(oneof);
}
@java.lang.Override
public Builder setRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
return super.setRepeatedField(field, index, value);
}
@java.lang.Override
public Builder addRepeatedField(
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
return super.addRepeatedField(field, value);
}
@java.lang.Override
public Builder mergeFrom(com.google.protobuf.Message other) {
if (other instanceof com.google.chat.v1.ListCustomEmojisResponse) {
return mergeFrom((com.google.chat.v1.ListCustomEmojisResponse) other);
} else {
super.mergeFrom(other);
return this;
}
}
public Builder mergeFrom(com.google.chat.v1.ListCustomEmojisResponse other) {
if (other == com.google.chat.v1.ListCustomEmojisResponse.getDefaultInstance()) return this;
if (customEmojisBuilder_ == null) {
if (!other.customEmojis_.isEmpty()) {
if (customEmojis_.isEmpty()) {
customEmojis_ = other.customEmojis_;
bitField0_ = (bitField0_ & ~0x00000001);
} else {
ensureCustomEmojisIsMutable();
customEmojis_.addAll(other.customEmojis_);
}
onChanged();
}
} else {
if (!other.customEmojis_.isEmpty()) {
if (customEmojisBuilder_.isEmpty()) {
customEmojisBuilder_.dispose();
customEmojisBuilder_ = null;
customEmojis_ = other.customEmojis_;
bitField0_ = (bitField0_ & ~0x00000001);
customEmojisBuilder_ =
com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders
? getCustomEmojisFieldBuilder()
: null;
} else {
customEmojisBuilder_.addAllMessages(other.customEmojis_);
}
}
}
if (!other.getNextPageToken().isEmpty()) {
nextPageToken_ = other.nextPageToken_;
bitField0_ |= 0x00000002;
onChanged();
}
this.mergeUnknownFields(other.getUnknownFields());
onChanged();
return this;
}
@java.lang.Override
public final boolean isInitialized() {
return true;
}
@java.lang.Override
public Builder mergeFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws java.io.IOException {
if (extensionRegistry == null) {
throw new java.lang.NullPointerException();
}
try {
boolean done = false;
while (!done) {
int tag = input.readTag();
switch (tag) {
case 0:
done = true;
break;
case 10:
{
com.google.chat.v1.CustomEmoji m =
input.readMessage(com.google.chat.v1.CustomEmoji.parser(), extensionRegistry);
if (customEmojisBuilder_ == null) {
ensureCustomEmojisIsMutable();
customEmojis_.add(m);
} else {
customEmojisBuilder_.addMessage(m);
}
break;
} // case 10
case 18:
{
nextPageToken_ = input.readStringRequireUtf8();
bitField0_ |= 0x00000002;
break;
} // case 18
default:
{
if (!super.parseUnknownField(input, extensionRegistry, tag)) {
done = true; // was an endgroup tag
}
break;
} // default:
} // switch (tag)
} // while (!done)
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.unwrapIOException();
} finally {
onChanged();
} // finally
return this;
}
private int bitField0_;
private java.util.List<com.google.chat.v1.CustomEmoji> customEmojis_ =
java.util.Collections.emptyList();
private void ensureCustomEmojisIsMutable() {
if (!((bitField0_ & 0x00000001) != 0)) {
customEmojis_ = new java.util.ArrayList<com.google.chat.v1.CustomEmoji>(customEmojis_);
bitField0_ |= 0x00000001;
}
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.chat.v1.CustomEmoji,
com.google.chat.v1.CustomEmoji.Builder,
com.google.chat.v1.CustomEmojiOrBuilder>
customEmojisBuilder_;
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public java.util.List<com.google.chat.v1.CustomEmoji> getCustomEmojisList() {
if (customEmojisBuilder_ == null) {
return java.util.Collections.unmodifiableList(customEmojis_);
} else {
return customEmojisBuilder_.getMessageList();
}
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public int getCustomEmojisCount() {
if (customEmojisBuilder_ == null) {
return customEmojis_.size();
} else {
return customEmojisBuilder_.getCount();
}
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public com.google.chat.v1.CustomEmoji getCustomEmojis(int index) {
if (customEmojisBuilder_ == null) {
return customEmojis_.get(index);
} else {
return customEmojisBuilder_.getMessage(index);
}
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public Builder setCustomEmojis(int index, com.google.chat.v1.CustomEmoji value) {
if (customEmojisBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCustomEmojisIsMutable();
customEmojis_.set(index, value);
onChanged();
} else {
customEmojisBuilder_.setMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public Builder setCustomEmojis(
int index, com.google.chat.v1.CustomEmoji.Builder builderForValue) {
if (customEmojisBuilder_ == null) {
ensureCustomEmojisIsMutable();
customEmojis_.set(index, builderForValue.build());
onChanged();
} else {
customEmojisBuilder_.setMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public Builder addCustomEmojis(com.google.chat.v1.CustomEmoji value) {
if (customEmojisBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCustomEmojisIsMutable();
customEmojis_.add(value);
onChanged();
} else {
customEmojisBuilder_.addMessage(value);
}
return this;
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public Builder addCustomEmojis(int index, com.google.chat.v1.CustomEmoji value) {
if (customEmojisBuilder_ == null) {
if (value == null) {
throw new NullPointerException();
}
ensureCustomEmojisIsMutable();
customEmojis_.add(index, value);
onChanged();
} else {
customEmojisBuilder_.addMessage(index, value);
}
return this;
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public Builder addCustomEmojis(com.google.chat.v1.CustomEmoji.Builder builderForValue) {
if (customEmojisBuilder_ == null) {
ensureCustomEmojisIsMutable();
customEmojis_.add(builderForValue.build());
onChanged();
} else {
customEmojisBuilder_.addMessage(builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public Builder addCustomEmojis(
int index, com.google.chat.v1.CustomEmoji.Builder builderForValue) {
if (customEmojisBuilder_ == null) {
ensureCustomEmojisIsMutable();
customEmojis_.add(index, builderForValue.build());
onChanged();
} else {
customEmojisBuilder_.addMessage(index, builderForValue.build());
}
return this;
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public Builder addAllCustomEmojis(
java.lang.Iterable<? extends com.google.chat.v1.CustomEmoji> values) {
if (customEmojisBuilder_ == null) {
ensureCustomEmojisIsMutable();
com.google.protobuf.AbstractMessageLite.Builder.addAll(values, customEmojis_);
onChanged();
} else {
customEmojisBuilder_.addAllMessages(values);
}
return this;
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public Builder clearCustomEmojis() {
if (customEmojisBuilder_ == null) {
customEmojis_ = java.util.Collections.emptyList();
bitField0_ = (bitField0_ & ~0x00000001);
onChanged();
} else {
customEmojisBuilder_.clear();
}
return this;
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public Builder removeCustomEmojis(int index) {
if (customEmojisBuilder_ == null) {
ensureCustomEmojisIsMutable();
customEmojis_.remove(index);
onChanged();
} else {
customEmojisBuilder_.remove(index);
}
return this;
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public com.google.chat.v1.CustomEmoji.Builder getCustomEmojisBuilder(int index) {
return getCustomEmojisFieldBuilder().getBuilder(index);
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public com.google.chat.v1.CustomEmojiOrBuilder getCustomEmojisOrBuilder(int index) {
if (customEmojisBuilder_ == null) {
return customEmojis_.get(index);
} else {
return customEmojisBuilder_.getMessageOrBuilder(index);
}
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public java.util.List<? extends com.google.chat.v1.CustomEmojiOrBuilder>
getCustomEmojisOrBuilderList() {
if (customEmojisBuilder_ != null) {
return customEmojisBuilder_.getMessageOrBuilderList();
} else {
return java.util.Collections.unmodifiableList(customEmojis_);
}
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public com.google.chat.v1.CustomEmoji.Builder addCustomEmojisBuilder() {
return getCustomEmojisFieldBuilder()
.addBuilder(com.google.chat.v1.CustomEmoji.getDefaultInstance());
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public com.google.chat.v1.CustomEmoji.Builder addCustomEmojisBuilder(int index) {
return getCustomEmojisFieldBuilder()
.addBuilder(index, com.google.chat.v1.CustomEmoji.getDefaultInstance());
}
/**
*
*
* <pre>
* Unordered list. List of custom emojis.
* </pre>
*
* <code>
* repeated .google.chat.v1.CustomEmoji custom_emojis = 1 [(.google.api.field_behavior) = UNORDERED_LIST];
* </code>
*/
public java.util.List<com.google.chat.v1.CustomEmoji.Builder> getCustomEmojisBuilderList() {
return getCustomEmojisFieldBuilder().getBuilderList();
}
private com.google.protobuf.RepeatedFieldBuilderV3<
com.google.chat.v1.CustomEmoji,
com.google.chat.v1.CustomEmoji.Builder,
com.google.chat.v1.CustomEmojiOrBuilder>
getCustomEmojisFieldBuilder() {
if (customEmojisBuilder_ == null) {
customEmojisBuilder_ =
new com.google.protobuf.RepeatedFieldBuilderV3<
com.google.chat.v1.CustomEmoji,
com.google.chat.v1.CustomEmoji.Builder,
com.google.chat.v1.CustomEmojiOrBuilder>(
customEmojis_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean());
customEmojis_ = null;
}
return customEmojisBuilder_;
}
private java.lang.Object nextPageToken_ = "";
/**
*
*
* <pre>
* A token that you can send as `pageToken` to retrieve the next page of
* results. If empty, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The nextPageToken.
*/
public java.lang.String getNextPageToken() {
java.lang.Object ref = nextPageToken_;
if (!(ref instanceof java.lang.String)) {
com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
java.lang.String s = bs.toStringUtf8();
nextPageToken_ = s;
return s;
} else {
return (java.lang.String) ref;
}
}
/**
*
*
* <pre>
* A token that you can send as `pageToken` to retrieve the next page of
* results. If empty, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return The bytes for nextPageToken.
*/
public com.google.protobuf.ByteString getNextPageTokenBytes() {
java.lang.Object ref = nextPageToken_;
if (ref instanceof String) {
com.google.protobuf.ByteString b =
com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
nextPageToken_ = b;
return b;
} else {
return (com.google.protobuf.ByteString) ref;
}
}
/**
*
*
* <pre>
* A token that you can send as `pageToken` to retrieve the next page of
* results. If empty, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageToken(java.lang.String value) {
if (value == null) {
throw new NullPointerException();
}
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that you can send as `pageToken` to retrieve the next page of
* results. If empty, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @return This builder for chaining.
*/
public Builder clearNextPageToken() {
nextPageToken_ = getDefaultInstance().getNextPageToken();
bitField0_ = (bitField0_ & ~0x00000002);
onChanged();
return this;
}
/**
*
*
* <pre>
* A token that you can send as `pageToken` to retrieve the next page of
* results. If empty, there are no subsequent pages.
* </pre>
*
* <code>string next_page_token = 2;</code>
*
* @param value The bytes for nextPageToken to set.
* @return This builder for chaining.
*/
public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) {
if (value == null) {
throw new NullPointerException();
}
checkByteStringIsUtf8(value);
nextPageToken_ = value;
bitField0_ |= 0x00000002;
onChanged();
return this;
}
@java.lang.Override
public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.setUnknownFields(unknownFields);
}
@java.lang.Override
public final Builder mergeUnknownFields(
final com.google.protobuf.UnknownFieldSet unknownFields) {
return super.mergeUnknownFields(unknownFields);
}
// @@protoc_insertion_point(builder_scope:google.chat.v1.ListCustomEmojisResponse)
}
// @@protoc_insertion_point(class_scope:google.chat.v1.ListCustomEmojisResponse)
private static final com.google.chat.v1.ListCustomEmojisResponse DEFAULT_INSTANCE;
static {
DEFAULT_INSTANCE = new com.google.chat.v1.ListCustomEmojisResponse();
}
public static com.google.chat.v1.ListCustomEmojisResponse getDefaultInstance() {
return DEFAULT_INSTANCE;
}
private static final com.google.protobuf.Parser<ListCustomEmojisResponse> PARSER =
new com.google.protobuf.AbstractParser<ListCustomEmojisResponse>() {
@java.lang.Override
public ListCustomEmojisResponse parsePartialFrom(
com.google.protobuf.CodedInputStream input,
com.google.protobuf.ExtensionRegistryLite extensionRegistry)
throws com.google.protobuf.InvalidProtocolBufferException {
Builder builder = newBuilder();
try {
builder.mergeFrom(input, extensionRegistry);
} catch (com.google.protobuf.InvalidProtocolBufferException e) {
throw e.setUnfinishedMessage(builder.buildPartial());
} catch (com.google.protobuf.UninitializedMessageException e) {
throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
} catch (java.io.IOException e) {
throw new com.google.protobuf.InvalidProtocolBufferException(e)
.setUnfinishedMessage(builder.buildPartial());
}
return builder.buildPartial();
}
};
public static com.google.protobuf.Parser<ListCustomEmojisResponse> parser() {
return PARSER;
}
@java.lang.Override
public com.google.protobuf.Parser<ListCustomEmojisResponse> getParserForType() {
return PARSER;
}
@java.lang.Override
public com.google.chat.v1.ListCustomEmojisResponse getDefaultInstanceForType() {
return DEFAULT_INSTANCE;
}
}
|
googleapis/google-cloud-java | 35,953 | java-containeranalysis/proto-google-cloud-containeranalysis-v1beta1/src/main/java/io/grafeas/v1beta1/Grafeas.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/devtools/containeranalysis/v1beta1/grafeas/grafeas.proto
// Protobuf Java Version: 3.25.8
package io.grafeas.v1beta1;
public final class Grafeas {
private Grafeas() {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}
public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
}
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_Occurrence_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_Occurrence_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_Resource_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_Resource_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_Note_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_Note_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_GetOccurrenceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_GetOccurrenceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_ListOccurrencesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_ListOccurrencesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_ListOccurrencesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_ListOccurrencesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_DeleteOccurrenceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_DeleteOccurrenceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_CreateOccurrenceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_CreateOccurrenceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_UpdateOccurrenceRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_UpdateOccurrenceRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_GetNoteRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_GetNoteRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_GetOccurrenceNoteRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_GetOccurrenceNoteRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_ListNotesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_ListNotesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_ListNotesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_ListNotesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_DeleteNoteRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_DeleteNoteRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_CreateNoteRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_CreateNoteRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_UpdateNoteRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_UpdateNoteRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_ListNoteOccurrencesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_ListNoteOccurrencesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_ListNoteOccurrencesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_ListNoteOccurrencesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_BatchCreateNotesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_BatchCreateNotesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_BatchCreateNotesRequest_NotesEntry_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_BatchCreateNotesRequest_NotesEntry_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_BatchCreateNotesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_BatchCreateNotesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_BatchCreateOccurrencesRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_BatchCreateOccurrencesRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_BatchCreateOccurrencesResponse_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_BatchCreateOccurrencesResponse_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_GetVulnerabilityOccurrencesSummaryRequest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_GetVulnerabilityOccurrencesSummaryRequest_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_VulnerabilityOccurrencesSummary_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_VulnerabilityOccurrencesSummary_fieldAccessorTable;
static final com.google.protobuf.Descriptors.Descriptor
internal_static_grafeas_v1beta1_VulnerabilityOccurrencesSummary_FixableTotalByDigest_descriptor;
static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_grafeas_v1beta1_VulnerabilityOccurrencesSummary_FixableTotalByDigest_fieldAccessorTable;
public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() {
return descriptor;
}
private static com.google.protobuf.Descriptors.FileDescriptor descriptor;
static {
// Serialized FileDescriptorProto for grafeas.proto, embedded as octal-escaped
// string chunks. This data is produced by protoc and MUST remain byte-identical;
// any change corrupts the descriptor parse below.
java.lang.String[] descriptorData = {
"\n"
+ "?google/devtools/containeranalysis/v1beta1/grafeas/grafeas.proto\022\017grafeas.v1bet"
+ "a1\032\034google/api/annotations.proto\032\027google"
+ "/api/client.proto\032Ggoogle/devtools/containeranalysis/v1beta1/attestation/attesta"
+ "tion.proto\032;google/devtools/containeranalysis/v1beta1/build/build.proto\032=google/"
+ "devtools/containeranalysis/v1beta1/common/common.proto\032Egoogle/devtools/containe"
+ "ranalysis/v1beta1/deployment/deployment.proto\032Cgoogle/devtools/containeranalysis"
+ "/v1beta1/discovery/discovery.proto\032;google/devtools/containeranalysis/v1beta1/im"
+ "age/image.proto\032?google/devtools/containeranalysis/v1beta1/package/package.proto"
+ "\032Egoogle/devtools/containeranalysis/v1beta1/provenance/provenance.proto\032Kgoogle/"
+ "devtools/containeranalysis/v1beta1/vulne"
+ "rability/vulnerability.proto\032\033google/protobuf/empty.proto\032"
+ " google/protobuf/field_mask.proto\032\037google/protobuf/timestamp.proto\"\234\005\n\n"
+ "Occurrence\022\014\n"
+ "\004name\030\001 \001(\t\022+\n"
+ "\010resource\030\002 \001(\0132\031.grafeas.v1beta1.Resource\022\021\n"
+ "\tnote_name\030\003 \001(\t\022\'\n"
+ "\004kind\030\004 \001(\0162\031.grafeas.v1beta1.NoteKind\022\023\n"
+ "\013remediation\030\005 \001(\t\022/\n"
+ "\013create_time\030\006 \001(\0132\032.google.protobuf.Timestamp\022/\n"
+ "\013update_time\030\007 \001(\0132\032.google.protobuf.Timestamp\022?\n\r"
+ "vulnerability\030\010 \001(\0132&.grafeas.v1beta1.vulnerability.DetailsH\000\022/\n"
+ "\005build\030\t \001(\0132\036.grafeas.v1beta1.build.DetailsH\000\0227\n\r"
+ "derived_image\030\n"
+ " \001(\0132\036.grafeas.v1beta1.image.DetailsH\000\0228\n"
+ "\014installation\030\013 \001(\0132 .grafeas.v1beta1.package.DetailsH\000\0229\n\n"
+ "deployment\030\014 \001(\0132#.grafeas.v1beta1.deployment.DetailsH\000\0228\n\n"
+ "discovered\030\r"
+ " \001(\0132\".grafeas.v1beta1.discovery.DetailsH\000\022;\n"
+ "\013attestation\030\016 \001(\0132$.grafeas.v1beta1.attestation.DetailsH\000B\t\n"
+ "\007details\"]\n"
+ "\010Resource\022\014\n"
+ "\004name\030\001 \001(\t\022\013\n"
+ "\003uri\030\002 \001(\t\0226\n"
+ "\014content_hash\030\003 \001(\0132 .grafeas.v1beta1.provenance.Hash\"\200\006\n"
+ "\004Note\022\014\n"
+ "\004name\030\001 \001(\t\022\031\n"
+ "\021short_description\030\002 \001(\t\022\030\n"
+ "\020long_description\030\003 \001(\t\022\'\n"
+ "\004kind\030\004 \001(\0162\031.grafeas.v1beta1.NoteKind\0220\n"
+ "\013related_url\030\005 \003(\0132\033.grafeas.v1beta1.RelatedUrl\0223\n"
+ "\017expiration_time\030\006 \001(\0132\032.google.protobuf.Timestamp\022/\n"
+ "\013create_time\030\007 \001(\0132\032.google.protobuf.Timestamp\022/\n"
+ "\013update_time\030\010 \001(\0132\032.google.protobuf.Timestamp\022\032\n"
+ "\022related_note_names\030\t \003(\t\022E\n\r"
+ "vulnerability\030\n"
+ " \001(\0132,.grafeas.v1beta1.vulnerability.VulnerabilityH\000\022-\n"
+ "\005build\030\013 \001(\0132\034.grafeas.v1beta1.build.BuildH\000\0222\n\n"
+ "base_image\030\014 \001(\0132\034.grafeas.v1beta1.image.BasisH\000\0223\n"
+ "\007package\030\r"
+ " \001(\0132 .grafeas.v1beta1.package.PackageH\000\022<\n\n"
+ "deployable\030\016 \001(\0132&.grafeas.v1beta1.deployment.DeployableH\000\0229\n"
+ "\tdiscovery\030\017 \001(\0132$.grafeas.v1beta1.discovery.DiscoveryH\000\022G\n"
+ "\025attestation_authority\030\020"
+ " \001(\0132&.grafeas.v1beta1.attestation.AuthorityH\000B\006\n"
+ "\004type\"$\n"
+ "\024GetOccurrenceRequest\022\014\n"
+ "\004name\030\001 \001(\t\"_\n"
+ "\026ListOccurrencesRequest\022\016\n"
+ "\006parent\030\001 \001(\t\022\016\n"
+ "\006filter\030\002 \001(\t\022\021\n"
+ "\tpage_size\030\003 \001(\005\022\022\n\n"
+ "page_token\030\004 \001(\t\"d\n"
+ "\027ListOccurrencesResponse\0220\n"
+ "\013occurrences\030\001 \003(\0132\033.grafeas.v1beta1.Occurrence\022\027\n"
+ "\017next_page_token\030\002 \001(\t\"\'\n"
+ "\027DeleteOccurrenceRequest\022\014\n"
+ "\004name\030\001 \001(\t\"Z\n"
+ "\027CreateOccurrenceRequest\022\016\n"
+ "\006parent\030\001 \001(\t\022/\n\n"
+ "occurrence\030\002 \001(\0132\033.grafeas.v1beta1.Occurrence\"\211\001\n"
+ "\027UpdateOccurrenceRequest\022\014\n"
+ "\004name\030\001 \001(\t\022/\n\n"
+ "occurrence\030\002 \001(\0132\033.grafeas.v1beta1.Occurrence\022/\n"
+ "\013update_mask\030\003 \001(\0132\032.google.protobuf.FieldMask\"\036\n"
+ "\016GetNoteRequest\022\014\n"
+ "\004name\030\001 \001(\t\"(\n"
+ "\030GetOccurrenceNoteRequest\022\014\n"
+ "\004name\030\001 \001(\t\"Y\n"
+ "\020ListNotesRequest\022\016\n"
+ "\006parent\030\001 \001(\t\022\016\n"
+ "\006filter\030\002 \001(\t\022\021\n"
+ "\tpage_size\030\003 \001(\005\022\022\n\n"
+ "page_token\030\004 \001(\t\"R\n"
+ "\021ListNotesResponse\022$\n"
+ "\005notes\030\001 \003(\0132\025.grafeas.v1beta1.Note\022\027\n"
+ "\017next_page_token\030\002 \001(\t\"!\n"
+ "\021DeleteNoteRequest\022\014\n"
+ "\004name\030\001 \001(\t\"Y\n"
+ "\021CreateNoteRequest\022\016\n"
+ "\006parent\030\001 \001(\t\022\017\n"
+ "\007note_id\030\002 \001(\t\022#\n"
+ "\004note\030\003 \001(\0132\025.grafeas.v1beta1.Note\"w\n"
+ "\021UpdateNoteRequest\022\014\n"
+ "\004name\030\001 \001(\t\022#\n"
+ "\004note\030\002 \001(\0132\025.grafeas.v1beta1.Note\022/\n"
+ "\013update_mask\030\003 \001(\0132\032.google.protobuf.FieldMask\"a\n"
+ "\032ListNoteOccurrencesRequest\022\014\n"
+ "\004name\030\001 \001(\t\022\016\n"
+ "\006filter\030\002 \001(\t\022\021\n"
+ "\tpage_size\030\003 \001(\005\022\022\n\n"
+ "page_token\030\004 \001(\t\"h\n"
+ "\033ListNoteOccurrencesResponse\0220\n"
+ "\013occurrences\030\001 \003(\0132\033.grafeas.v1beta1.Occurrence\022\027\n"
+ "\017next_page_token\030\002 \001(\t\"\262\001\n"
+ "\027BatchCreateNotesRequest\022\016\n"
+ "\006parent\030\001 \001(\t\022B\n"
+ "\005notes\030\002"
+ " \003(\01323.grafeas.v1beta1.BatchCreateNotesRequest.NotesEntry\032C\n\n"
+ "NotesEntry\022\013\n"
+ "\003key\030\001 \001(\t\022$\n"
+ "\005value\030\002 \001(\0132\025.grafeas.v1beta1.Note:\0028\001\"@\n"
+ "\030BatchCreateNotesResponse\022$\n"
+ "\005notes\030\001 \003(\0132\025.grafeas.v1beta1.Note\"a\n"
+ "\035BatchCreateOccurrencesRequest\022\016\n"
+ "\006parent\030\001 \001(\t\0220\n"
+ "\013occurrences\030\002 \003(\0132\033.grafeas.v1beta1.Occurrence\"R\n"
+ "\036BatchCreateOccurrencesResponse\0220\n"
+ "\013occurrences\030\001 \003(\0132\033.grafeas.v1beta1.Occurrence\"K\n"
+ ")GetVulnerabilityOccurrencesSummaryRequest\022\016\n"
+ "\006parent\030\001 \001(\t\022\016\n"
+ "\006filter\030\002 \001(\t\"\245\002\n"
+ "\037VulnerabilityOccurrencesSummary\022U\n"
+ "\006counts\030\001 \003(\0132E.grafea"
+ "s.v1beta1.VulnerabilityOccurrencesSummary.FixableTotalByDigest\032\252\001\n"
+ "\024FixableTotalByDigest\022+\n"
+ "\010resource\030\001 \001(\0132\031.grafeas.v1beta1.Resource\0229\n"
+ "\010severity\030\002 \001(\0162\'.grafeas.v1beta1.vulnerability.Severity\022\025\n\r"
+ "fixable_count\030\003 \001(\003\022\023\n"
+ "\013total_count\030\004 \001(\0032\230\022\n"
+ "\016GrafeasV1Beta1\022\205\001\n\r"
+ "GetOccurrence\022%.grafeas.v1beta1.GetOccurrenceRequest\032\033.grafea"
+ "s.v1beta1.Occurrence\"0\202\323\344\223\002*\022(/v1beta1/{name=projects/*/occurrences/*}\022\226\001\n"
+ "\017ListOccurrences\022\'.grafeas.v1beta1.ListOccurre"
+ "ncesRequest\032(.grafeas.v1beta1.ListOccurr"
+ "encesResponse\"0\202\323\344\223\002*\022(/v1beta1/{parent=projects/*}/occurrences\022\206\001\n"
+ "\020DeleteOccurrence\022(.grafeas.v1beta1.DeleteOccurrenceR"
+ "equest\032\026.google.protobuf.Empty\"0\202\323\344\223\002**("
+ "/v1beta1/{name=projects/*/occurrences/*}\022\227\001\n"
+ "\020CreateOccurrence\022(.grafeas.v1beta1."
+ "CreateOccurrenceRequest\032\033.grafeas.v1beta"
+ "1.Occurrence\"<\202\323\344\223\0026\"(/v1beta1/{parent=projects/*}/occurrences:\n"
+ "occurrence\022\272\001\n"
+ "\026BatchCreateOccurrences\022..grafeas.v1beta1."
+ "BatchCreateOccurrencesRequest\032/.grafeas.v1beta1.BatchCreateOccurrencesResponse\"?"
+ "\202\323\344\223\0029\"4/v1beta1/{parent=projects/*}/occurrences:batchCreate:\001*\022\227\001\n"
+ "\020UpdateOccurrence\022(.grafeas.v1beta1.UpdateOccurrenceR"
+ "equest\032\033.grafeas.v1beta1.Occurrence\"<\202\323\344"
+ "\223\00262(/v1beta1/{name=projects/*/occurrences/*}:\n"
+ "occurrence\022\215\001\n"
+ "\021GetOccurrenceNote\022).grafeas.v1beta1.GetOccurrenceNoteReque"
+ "st\032\025.grafeas.v1beta1.Note\"6\202\323\344\223\0020\022./v1be"
+ "ta1/{name=projects/*/occurrences/*}/notes\022m\n"
+ "\007GetNote\022\037.grafeas.v1beta1.GetNoteRe"
+ "quest\032\025.grafeas.v1beta1.Note\"*\202\323\344\223\002$\022\"/v1beta1/{name=projects/*/notes/*}\022~\n"
+ "\tListNotes\022!.grafeas.v1beta1.ListNotesRequest"
+ "\032\".grafeas.v1beta1.ListNotesResponse\"*\202\323"
+ "\344\223\002$\022\"/v1beta1/{parent=projects/*}/notes\022t\n\n"
+ "DeleteNote\022\".grafeas.v1beta1.DeleteN"
+ "oteRequest\032\026.google.protobuf.Empty\"*\202\323\344\223"
+ "\002$*\"/v1beta1/{name=projects/*/notes/*}\022y\n\n"
+ "CreateNote\022\".grafeas.v1beta1.CreateNot"
+ "eRequest\032\025.grafeas.v1beta1.Note\"0\202\323\344\223\002*\""
+ "\"/v1beta1/{parent=projects/*}/notes:\004note\022\242\001\n"
+ "\020BatchCreateNotes\022(.grafeas.v1beta1.BatchCreateNotesRequest\032).grafeas.v1bet"
+ "a1.BatchCreateNotesResponse\"9\202\323\344\223\0023\"./v1"
+ "beta1/{parent=projects/*}/notes:batchCreate:\001*\022y\n\n"
+ "UpdateNote\022\".grafeas.v1beta1.UpdateNoteRequest\032\025.grafeas.v1beta1.Note\""
+ "0\202\323\344\223\002*2\"/v1beta1/{name=projects/*/notes/*}:\004note\022\250\001\n"
+ "\023ListNoteOccurrences\022+.grafeas.v1beta1.ListNoteOccurrencesRequest\032,"
+ ".grafeas.v1beta1.ListNoteOccurrencesResp"
+ "onse\"6\202\323\344\223\0020\022./v1beta1/{name=projects/*/notes/*}/occurrences\022\331\001\n"
+ "\"GetVulnerabilityOccurrencesSummary\022:.grafeas.v1beta1.Ge"
+ "tVulnerabilityOccurrencesSummaryRequest\0320.grafeas.v1beta1.VulnerabilityOccurrenc"
+ "esSummary\"E\202\323\344\223\002?\022=/v1beta1/{parent=proj"
+ "ects/*}/occurrences:vulnerabilitySummary\032T\312A"
+ " containeranalysis.googleapis.com\322A."
+ "https://www.googleapis.com/auth/cloud-platformBv\n"
+ "\022io.grafeas.v1beta1P\001ZXgoogle.golang.org/genproto/googleapis/devtools/c"
+ "ontaineranalysis/v1beta1/grafeas;grafeas\242\002\003GRAb\006proto3"
};
// Parse the serialized proto into a live FileDescriptor, resolving imports
// against the already-built descriptors of every dependency file.
// Dependency order here must mirror the imports in grafeas.proto.
descriptor =
com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom(
descriptorData,
new com.google.protobuf.Descriptors.FileDescriptor[] {
com.google.api.AnnotationsProto.getDescriptor(),
com.google.api.ClientProto.getDescriptor(),
io.grafeas.v1beta1.attestation.AttestationOuterClass.getDescriptor(),
io.grafeas.v1beta1.build.BuildOuterClass.getDescriptor(),
io.grafeas.v1beta1.common.Common.getDescriptor(),
io.grafeas.v1beta1.deployment.DeploymentOuterClass.getDescriptor(),
io.grafeas.v1beta1.discovery.DiscoveryOuterClass.getDescriptor(),
io.grafeas.v1beta1.image.Image.getDescriptor(),
io.grafeas.v1beta1.pkg.PackageOuterClass.getDescriptor(),
io.grafeas.v1beta1.provenance.Provenance.getDescriptor(),
io.grafeas.v1beta1.vulnerability.VulnerabilityOuterClass.getDescriptor(),
com.google.protobuf.EmptyProto.getDescriptor(),
com.google.protobuf.FieldMaskProto.getDescriptor(),
com.google.protobuf.TimestampProto.getDescriptor(),
});
// Wire each static descriptor/field-accessor pair to its message type.
// The getMessageTypes() index and the field-name arrays are positional and
// must match the message/field declaration order in the .proto file.
internal_static_grafeas_v1beta1_Occurrence_descriptor =
getDescriptor().getMessageTypes().get(0);
internal_static_grafeas_v1beta1_Occurrence_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_Occurrence_descriptor,
new java.lang.String[] {
"Name",
"Resource",
"NoteName",
"Kind",
"Remediation",
"CreateTime",
"UpdateTime",
"Vulnerability",
"Build",
"DerivedImage",
"Installation",
"Deployment",
"Discovered",
"Attestation",
"Details",
});
internal_static_grafeas_v1beta1_Resource_descriptor = getDescriptor().getMessageTypes().get(1);
internal_static_grafeas_v1beta1_Resource_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_Resource_descriptor,
new java.lang.String[] {
"Name", "Uri", "ContentHash",
});
internal_static_grafeas_v1beta1_Note_descriptor = getDescriptor().getMessageTypes().get(2);
internal_static_grafeas_v1beta1_Note_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_Note_descriptor,
new java.lang.String[] {
"Name",
"ShortDescription",
"LongDescription",
"Kind",
"RelatedUrl",
"ExpirationTime",
"CreateTime",
"UpdateTime",
"RelatedNoteNames",
"Vulnerability",
"Build",
"BaseImage",
"Package",
"Deployable",
"Discovery",
"AttestationAuthority",
"Type",
});
internal_static_grafeas_v1beta1_GetOccurrenceRequest_descriptor =
getDescriptor().getMessageTypes().get(3);
internal_static_grafeas_v1beta1_GetOccurrenceRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_GetOccurrenceRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_grafeas_v1beta1_ListOccurrencesRequest_descriptor =
getDescriptor().getMessageTypes().get(4);
internal_static_grafeas_v1beta1_ListOccurrencesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_ListOccurrencesRequest_descriptor,
new java.lang.String[] {
"Parent", "Filter", "PageSize", "PageToken",
});
internal_static_grafeas_v1beta1_ListOccurrencesResponse_descriptor =
getDescriptor().getMessageTypes().get(5);
internal_static_grafeas_v1beta1_ListOccurrencesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_ListOccurrencesResponse_descriptor,
new java.lang.String[] {
"Occurrences", "NextPageToken",
});
internal_static_grafeas_v1beta1_DeleteOccurrenceRequest_descriptor =
getDescriptor().getMessageTypes().get(6);
internal_static_grafeas_v1beta1_DeleteOccurrenceRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_DeleteOccurrenceRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_grafeas_v1beta1_CreateOccurrenceRequest_descriptor =
getDescriptor().getMessageTypes().get(7);
internal_static_grafeas_v1beta1_CreateOccurrenceRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_CreateOccurrenceRequest_descriptor,
new java.lang.String[] {
"Parent", "Occurrence",
});
internal_static_grafeas_v1beta1_UpdateOccurrenceRequest_descriptor =
getDescriptor().getMessageTypes().get(8);
internal_static_grafeas_v1beta1_UpdateOccurrenceRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_UpdateOccurrenceRequest_descriptor,
new java.lang.String[] {
"Name", "Occurrence", "UpdateMask",
});
internal_static_grafeas_v1beta1_GetNoteRequest_descriptor =
getDescriptor().getMessageTypes().get(9);
internal_static_grafeas_v1beta1_GetNoteRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_GetNoteRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_grafeas_v1beta1_GetOccurrenceNoteRequest_descriptor =
getDescriptor().getMessageTypes().get(10);
internal_static_grafeas_v1beta1_GetOccurrenceNoteRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_GetOccurrenceNoteRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_grafeas_v1beta1_ListNotesRequest_descriptor =
getDescriptor().getMessageTypes().get(11);
internal_static_grafeas_v1beta1_ListNotesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_ListNotesRequest_descriptor,
new java.lang.String[] {
"Parent", "Filter", "PageSize", "PageToken",
});
internal_static_grafeas_v1beta1_ListNotesResponse_descriptor =
getDescriptor().getMessageTypes().get(12);
internal_static_grafeas_v1beta1_ListNotesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_ListNotesResponse_descriptor,
new java.lang.String[] {
"Notes", "NextPageToken",
});
internal_static_grafeas_v1beta1_DeleteNoteRequest_descriptor =
getDescriptor().getMessageTypes().get(13);
internal_static_grafeas_v1beta1_DeleteNoteRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_DeleteNoteRequest_descriptor,
new java.lang.String[] {
"Name",
});
internal_static_grafeas_v1beta1_CreateNoteRequest_descriptor =
getDescriptor().getMessageTypes().get(14);
internal_static_grafeas_v1beta1_CreateNoteRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_CreateNoteRequest_descriptor,
new java.lang.String[] {
"Parent", "NoteId", "Note",
});
internal_static_grafeas_v1beta1_UpdateNoteRequest_descriptor =
getDescriptor().getMessageTypes().get(15);
internal_static_grafeas_v1beta1_UpdateNoteRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_UpdateNoteRequest_descriptor,
new java.lang.String[] {
"Name", "Note", "UpdateMask",
});
internal_static_grafeas_v1beta1_ListNoteOccurrencesRequest_descriptor =
getDescriptor().getMessageTypes().get(16);
internal_static_grafeas_v1beta1_ListNoteOccurrencesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_ListNoteOccurrencesRequest_descriptor,
new java.lang.String[] {
"Name", "Filter", "PageSize", "PageToken",
});
internal_static_grafeas_v1beta1_ListNoteOccurrencesResponse_descriptor =
getDescriptor().getMessageTypes().get(17);
internal_static_grafeas_v1beta1_ListNoteOccurrencesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_ListNoteOccurrencesResponse_descriptor,
new java.lang.String[] {
"Occurrences", "NextPageToken",
});
internal_static_grafeas_v1beta1_BatchCreateNotesRequest_descriptor =
getDescriptor().getMessageTypes().get(18);
internal_static_grafeas_v1beta1_BatchCreateNotesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_BatchCreateNotesRequest_descriptor,
new java.lang.String[] {
"Parent", "Notes",
});
// Map-entry message (notes map) lives as nested type 0 of its parent request.
internal_static_grafeas_v1beta1_BatchCreateNotesRequest_NotesEntry_descriptor =
internal_static_grafeas_v1beta1_BatchCreateNotesRequest_descriptor.getNestedTypes().get(0);
internal_static_grafeas_v1beta1_BatchCreateNotesRequest_NotesEntry_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_BatchCreateNotesRequest_NotesEntry_descriptor,
new java.lang.String[] {
"Key", "Value",
});
internal_static_grafeas_v1beta1_BatchCreateNotesResponse_descriptor =
getDescriptor().getMessageTypes().get(19);
internal_static_grafeas_v1beta1_BatchCreateNotesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_BatchCreateNotesResponse_descriptor,
new java.lang.String[] {
"Notes",
});
internal_static_grafeas_v1beta1_BatchCreateOccurrencesRequest_descriptor =
getDescriptor().getMessageTypes().get(20);
internal_static_grafeas_v1beta1_BatchCreateOccurrencesRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_BatchCreateOccurrencesRequest_descriptor,
new java.lang.String[] {
"Parent", "Occurrences",
});
internal_static_grafeas_v1beta1_BatchCreateOccurrencesResponse_descriptor =
getDescriptor().getMessageTypes().get(21);
internal_static_grafeas_v1beta1_BatchCreateOccurrencesResponse_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_BatchCreateOccurrencesResponse_descriptor,
new java.lang.String[] {
"Occurrences",
});
internal_static_grafeas_v1beta1_GetVulnerabilityOccurrencesSummaryRequest_descriptor =
getDescriptor().getMessageTypes().get(22);
internal_static_grafeas_v1beta1_GetVulnerabilityOccurrencesSummaryRequest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_GetVulnerabilityOccurrencesSummaryRequest_descriptor,
new java.lang.String[] {
"Parent", "Filter",
});
internal_static_grafeas_v1beta1_VulnerabilityOccurrencesSummary_descriptor =
getDescriptor().getMessageTypes().get(23);
internal_static_grafeas_v1beta1_VulnerabilityOccurrencesSummary_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_VulnerabilityOccurrencesSummary_descriptor,
new java.lang.String[] {
"Counts",
});
internal_static_grafeas_v1beta1_VulnerabilityOccurrencesSummary_FixableTotalByDigest_descriptor =
internal_static_grafeas_v1beta1_VulnerabilityOccurrencesSummary_descriptor
.getNestedTypes()
.get(0);
internal_static_grafeas_v1beta1_VulnerabilityOccurrencesSummary_FixableTotalByDigest_fieldAccessorTable =
new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable(
internal_static_grafeas_v1beta1_VulnerabilityOccurrencesSummary_FixableTotalByDigest_descriptor,
new java.lang.String[] {
"Resource", "Severity", "FixableCount", "TotalCount",
});
// Re-parse the descriptor's custom options (HTTP bindings, default host,
// OAuth scopes) now that the extension types are registered.
com.google.protobuf.ExtensionRegistry registry =
com.google.protobuf.ExtensionRegistry.newInstance();
registry.add(com.google.api.ClientProto.defaultHost);
registry.add(com.google.api.AnnotationsProto.http);
registry.add(com.google.api.ClientProto.oauthScopes);
com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor(
descriptor, registry);
// Force static initialization of all dependency files so their descriptors
// are fully built before this class is used.
com.google.api.AnnotationsProto.getDescriptor();
com.google.api.ClientProto.getDescriptor();
io.grafeas.v1beta1.attestation.AttestationOuterClass.getDescriptor();
io.grafeas.v1beta1.build.BuildOuterClass.getDescriptor();
io.grafeas.v1beta1.common.Common.getDescriptor();
io.grafeas.v1beta1.deployment.DeploymentOuterClass.getDescriptor();
io.grafeas.v1beta1.discovery.DiscoveryOuterClass.getDescriptor();
io.grafeas.v1beta1.image.Image.getDescriptor();
io.grafeas.v1beta1.pkg.PackageOuterClass.getDescriptor();
io.grafeas.v1beta1.provenance.Provenance.getDescriptor();
io.grafeas.v1beta1.vulnerability.VulnerabilityOuterClass.getDescriptor();
com.google.protobuf.EmptyProto.getDescriptor();
com.google.protobuf.FieldMaskProto.getDescriptor();
com.google.protobuf.TimestampProto.getDescriptor();
}
// @@protoc_insertion_point(outer_class_scope)
}
/*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
// Generated by the protocol buffer compiler. DO NOT EDIT!
// source: google/cloud/aiplatform/v1/tool.proto
// Protobuf Java Version: 3.25.8
package com.google.cloud.aiplatform.v1;
/**
*
*
* <pre>
* Function calling config.
* </pre>
*
* Protobuf type {@code google.cloud.aiplatform.v1.FunctionCallingConfig}
*/
public final class FunctionCallingConfig extends com.google.protobuf.GeneratedMessageV3
implements
// @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.FunctionCallingConfig)
FunctionCallingConfigOrBuilder {
private static final long serialVersionUID = 0L;
// Use FunctionCallingConfig.newBuilder() to construct.
private FunctionCallingConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
super(builder);
}
// Zero-arg constructor used only by parsing/newInstance; initializes fields
// to their proto3 defaults (enum 0, empty repeated list).
private FunctionCallingConfig() {
mode_ = 0;
allowedFunctionNames_ = com.google.protobuf.LazyStringArrayList.emptyList();
}
@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
return new FunctionCallingConfig();
}
// Descriptor for this message type, looked up from the generated ToolProto
// outer class (google/cloud/aiplatform/v1/tool.proto).
public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.ToolProto
.internal_static_google_cloud_aiplatform_v1_FunctionCallingConfig_descriptor;
}
@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internalGetFieldAccessorTable() {
return com.google.cloud.aiplatform.v1.ToolProto
.internal_static_google_cloud_aiplatform_v1_FunctionCallingConfig_fieldAccessorTable
.ensureFieldAccessorsInitialized(
com.google.cloud.aiplatform.v1.FunctionCallingConfig.class,
com.google.cloud.aiplatform.v1.FunctionCallingConfig.Builder.class);
}
/**
*
*
* <pre>
* Function calling mode.
* </pre>
*
* Protobuf enum {@code google.cloud.aiplatform.v1.FunctionCallingConfig.Mode}
*/
public enum Mode implements com.google.protobuf.ProtocolMessageEnum {
/**
*
*
* <pre>
* Unspecified function calling mode. This value should not be used.
* </pre>
*
* <code>MODE_UNSPECIFIED = 0;</code>
*/
MODE_UNSPECIFIED(0),
/**
*
*
* <pre>
* Default model behavior, model decides to predict either function calls
* or natural language response.
* </pre>
*
* <code>AUTO = 1;</code>
*/
AUTO(1),
/**
*
*
* <pre>
* Model is constrained to always predicting function calls only.
* If "allowed_function_names" are set, the predicted function calls will be
* limited to any one of "allowed_function_names", else the predicted
* function calls will be any one of the provided "function_declarations".
* </pre>
*
* <code>ANY = 2;</code>
*/
ANY(2),
/**
*
*
* <pre>
* Model will not predict any function calls. Model behavior is same as when
* not passing any function declarations.
* </pre>
*
* <code>NONE = 3;</code>
*/
NONE(3),
// Sentinel for wire values not known to this generated code version.
UNRECOGNIZED(-1),
;
/**
*
*
* <pre>
* Unspecified function calling mode. This value should not be used.
* </pre>
*
* <code>MODE_UNSPECIFIED = 0;</code>
*/
public static final int MODE_UNSPECIFIED_VALUE = 0;
/**
*
*
* <pre>
* Default model behavior, model decides to predict either function calls
* or natural language response.
* </pre>
*
* <code>AUTO = 1;</code>
*/
public static final int AUTO_VALUE = 1;
/**
*
*
* <pre>
* Model is constrained to always predicting function calls only.
* If "allowed_function_names" are set, the predicted function calls will be
* limited to any one of "allowed_function_names", else the predicted
* function calls will be any one of the provided "function_declarations".
* </pre>
*
* <code>ANY = 2;</code>
*/
public static final int ANY_VALUE = 2;
/**
*
*
* <pre>
* Model will not predict any function calls. Model behavior is same as when
* not passing any function declarations.
* </pre>
*
* <code>NONE = 3;</code>
*/
public static final int NONE_VALUE = 3;
// Wire value of this constant; UNRECOGNIZED carries no number by contract.
public final int getNumber() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalArgumentException(
"Can't get the number of an unknown enum value.");
}
return value;
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value.
* @deprecated Use {@link #forNumber(int)} instead.
*/
@java.lang.Deprecated
public static Mode valueOf(int value) {
return forNumber(value);
}
/**
* @param value The numeric wire value of the corresponding enum entry.
* @return The enum associated with the given numeric wire value, or null if unknown.
*/
public static Mode forNumber(int value) {
switch (value) {
case 0:
return MODE_UNSPECIFIED;
case 1:
return AUTO;
case 2:
return ANY;
case 3:
return NONE;
default:
return null;
}
}
public static com.google.protobuf.Internal.EnumLiteMap<Mode> internalGetValueMap() {
return internalValueMap;
}
private static final com.google.protobuf.Internal.EnumLiteMap<Mode> internalValueMap =
new com.google.protobuf.Internal.EnumLiteMap<Mode>() {
public Mode findValueByNumber(int number) {
return Mode.forNumber(number);
}
};
public final com.google.protobuf.Descriptors.EnumValueDescriptor getValueDescriptor() {
if (this == UNRECOGNIZED) {
throw new java.lang.IllegalStateException(
"Can't get the descriptor of an unrecognized enum value.");
}
return getDescriptor().getValues().get(ordinal());
}
public final com.google.protobuf.Descriptors.EnumDescriptor getDescriptorForType() {
return getDescriptor();
}
public static final com.google.protobuf.Descriptors.EnumDescriptor getDescriptor() {
return com.google.cloud.aiplatform.v1.FunctionCallingConfig.getDescriptor()
.getEnumTypes()
.get(0);
}
// Cached copy of values(); indexed by descriptor index in valueOf(desc).
private static final Mode[] VALUES = values();
public static Mode valueOf(com.google.protobuf.Descriptors.EnumValueDescriptor desc) {
if (desc.getType() != getDescriptor()) {
throw new java.lang.IllegalArgumentException("EnumValueDescriptor is not for this type.");
}
if (desc.getIndex() == -1) {
return UNRECOGNIZED;
}
return VALUES[desc.getIndex()];
}
private final int value;
private Mode(int value) {
this.value = value;
}
// @@protoc_insertion_point(enum_scope:google.cloud.aiplatform.v1.FunctionCallingConfig.Mode)
}
public static final int MODE_FIELD_NUMBER = 1;
// Raw wire value of the mode enum; 0 == MODE_UNSPECIFIED (proto3 default).
private int mode_ = 0;
/**
*
*
* <pre>
* Optional. Function calling mode.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.FunctionCallingConfig.Mode mode = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The enum numeric value on the wire for mode.
*/
@java.lang.Override
public int getModeValue() {
return mode_;
}
/**
*
*
* <pre>
* Optional. Function calling mode.
* </pre>
*
* <code>
* .google.cloud.aiplatform.v1.FunctionCallingConfig.Mode mode = 1 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The mode, or {@code UNRECOGNIZED} if the wire value is unknown to this version.
*/
@java.lang.Override
public com.google.cloud.aiplatform.v1.FunctionCallingConfig.Mode getMode() {
com.google.cloud.aiplatform.v1.FunctionCallingConfig.Mode result =
com.google.cloud.aiplatform.v1.FunctionCallingConfig.Mode.forNumber(mode_);
// forNumber returns null for unknown wire values; surface that as UNRECOGNIZED.
return result == null
? com.google.cloud.aiplatform.v1.FunctionCallingConfig.Mode.UNRECOGNIZED
: result;
}
public static final int ALLOWED_FUNCTION_NAMES_FIELD_NUMBER = 2;
@SuppressWarnings("serial")
private com.google.protobuf.LazyStringArrayList allowedFunctionNames_ =
com.google.protobuf.LazyStringArrayList.emptyList();
/**
*
*
* <pre>
* Optional. Function names to call. Only set when the Mode is ANY. Function
* names should match [FunctionDeclaration.name]. With mode set to ANY, model
* will predict a function call from the set of function names provided.
* </pre>
*
* <code>repeated string allowed_function_names = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return A list containing the allowedFunctionNames.
*/
public com.google.protobuf.ProtocolStringList getAllowedFunctionNamesList() {
return allowedFunctionNames_;
}
/**
*
*
* <pre>
* Optional. Function names to call. Only set when the Mode is ANY. Function
* names should match [FunctionDeclaration.name]. With mode set to ANY, model
* will predict a function call from the set of function names provided.
* </pre>
*
* <code>repeated string allowed_function_names = 2 [(.google.api.field_behavior) = OPTIONAL];
* </code>
*
* @return The count of allowedFunctionNames.
*/
public int getAllowedFunctionNamesCount() {
return allowedFunctionNames_.size();
}
/**
 * Optional. Function names to call. Only set when the Mode is ANY. Function
 * names should match [FunctionDeclaration.name].
 *
 * <code>repeated string allowed_function_names = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param index The index of the element to return.
 * @return The allowedFunctionNames at the given index.
 */
public java.lang.String getAllowedFunctionNames(int index) {
  return allowedFunctionNames_.get(index);
}
/**
 * Optional. Function names to call. Only set when the Mode is ANY. Function
 * names should match [FunctionDeclaration.name].
 *
 * <code>repeated string allowed_function_names = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
 *
 * @param index The index of the value to return.
 * @return The bytes (UTF-8) of the allowedFunctionNames at the given index.
 */
public com.google.protobuf.ByteString getAllowedFunctionNamesBytes(int index) {
  return allowedFunctionNames_.getByteString(index);
}
// Memoized isInitialized() result: -1 = not yet computed, 0 = false, 1 = true.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  // This message declares no required fields, so it is always initialized.
  memoizedIsInitialized = 1;
  return true;
}
/** Serializes this message to {@code output} in ascending field-number order. */
@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  // Field 1 (mode): per proto3 rules, only emitted when it differs from the
  // default value MODE_UNSPECIFIED (0).
  if (mode_
      != com.google.cloud.aiplatform.v1.FunctionCallingConfig.Mode.MODE_UNSPECIFIED.getNumber()) {
    output.writeEnum(1, mode_);
  }
  // Field 2 (allowed_function_names): each repeated string is written as its
  // own length-delimited entry.
  for (int i = 0; i < allowedFunctionNames_.size(); i++) {
    com.google.protobuf.GeneratedMessageV3.writeString(
        output, 2, allowedFunctionNames_.getRaw(i));
  }
  getUnknownFields().writeTo(output);
}
/**
 * Computes (and memoizes in {@code memoizedSize}, inherited with sentinel -1)
 * the serialized byte size of this message.
 */
@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  // Field 1 (mode): counted only when non-default, mirroring writeTo().
  if (mode_
      != com.google.cloud.aiplatform.v1.FunctionCallingConfig.Mode.MODE_UNSPECIFIED.getNumber()) {
    size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, mode_);
  }
  // Field 2: payload bytes of every string, plus one tag byte per element
  // (tag for field 2 with wire type 2 encodes in a single byte).
  {
    int dataSize = 0;
    for (int i = 0; i < allowedFunctionNames_.size(); i++) {
      dataSize += computeStringSizeNoTag(allowedFunctionNames_.getRaw(i));
    }
    size += dataSize;
    size += 1 * getAllowedFunctionNamesList().size();
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}
/**
 * Value equality: two FunctionCallingConfig messages are equal when their mode,
 * allowed-function-name lists, and unknown fields all match.
 */
@java.lang.Override
public boolean equals(final java.lang.Object obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.aiplatform.v1.FunctionCallingConfig)) {
    return super.equals(obj);
  }
  com.google.cloud.aiplatform.v1.FunctionCallingConfig that =
      (com.google.cloud.aiplatform.v1.FunctionCallingConfig) obj;
  return mode_ == that.mode_
      && getAllowedFunctionNamesList().equals(that.getAllowedFunctionNamesList())
      && getUnknownFields().equals(that.getUnknownFields());
}
/**
 * Hash consistent with equals(); memoized in {@code memoizedHashCode}
 * (0 doubles as the "not yet computed" sentinel, so a hash that happens to be
 * 0 is simply recomputed each call).
 */
@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + MODE_FIELD_NUMBER;
  hash = (53 * hash) + mode_;
  // Repeated field only contributes when non-empty, keeping the hash stable
  // between "unset" and "explicitly empty".
  if (getAllowedFunctionNamesCount() > 0) {
    hash = (37 * hash) + ALLOWED_FUNCTION_NAMES_FIELD_NUMBER;
    hash = (53 * hash) + getAllowedFunctionNamesList().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}
// ---------------------------------------------------------------------------
// Standard generated parse entry points. All overloads delegate either to the
// singleton PARSER (for in-memory data) or to the GeneratedMessageV3 helpers
// (for streams, which translate IO failures into IOException rather than
// InvalidProtocolBufferException). The *delimitedFrom variants expect a
// varint length prefix before the message bytes.
// ---------------------------------------------------------------------------
public static com.google.cloud.aiplatform.v1.FunctionCallingConfig parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1.FunctionCallingConfig parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.FunctionCallingConfig parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1.FunctionCallingConfig parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.FunctionCallingConfig parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.aiplatform.v1.FunctionCallingConfig parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.FunctionCallingConfig parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1.FunctionCallingConfig parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.FunctionCallingConfig parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1.FunctionCallingConfig parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.aiplatform.v1.FunctionCallingConfig parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.aiplatform.v1.FunctionCallingConfig parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}
// ---------------------------------------------------------------------------
// Builder factories. toBuilder() avoids a redundant mergeFrom when called on
// the default instance; newBuilder(prototype) seeds a builder with a copy of
// an existing message's fields.
// ---------------------------------------------------------------------------
@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(com.google.cloud.aiplatform.v1.FunctionCallingConfig prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}
/**
 * Function calling config.
 *
 * Builder for {@code google.cloud.aiplatform.v1.FunctionCallingConfig}.
 * Field presence is tracked in {@code bitField0_}: bit 0x00000001 = mode,
 * bit 0x00000002 = allowedFunctionNames.
 *
 * Protobuf type {@code google.cloud.aiplatform.v1.FunctionCallingConfig}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.FunctionCallingConfig)
    com.google.cloud.aiplatform.v1.FunctionCallingConfigOrBuilder {

  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.aiplatform.v1.ToolProto
        .internal_static_google_cloud_aiplatform_v1_FunctionCallingConfig_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.aiplatform.v1.ToolProto
        .internal_static_google_cloud_aiplatform_v1_FunctionCallingConfig_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.aiplatform.v1.FunctionCallingConfig.class,
            com.google.cloud.aiplatform.v1.FunctionCallingConfig.Builder.class);
  }

  // Construct using com.google.cloud.aiplatform.v1.FunctionCallingConfig.newBuilder()
  private Builder() {}

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
  }

  /** Resets every field to its default and clears all presence bits. */
  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    mode_ = 0;
    allowedFunctionNames_ = com.google.protobuf.LazyStringArrayList.emptyList();
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.aiplatform.v1.ToolProto
        .internal_static_google_cloud_aiplatform_v1_FunctionCallingConfig_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.FunctionCallingConfig getDefaultInstanceForType() {
    return com.google.cloud.aiplatform.v1.FunctionCallingConfig.getDefaultInstance();
  }

  /** Builds the message, throwing if it is uninitialized (never, for this type). */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.FunctionCallingConfig build() {
    com.google.cloud.aiplatform.v1.FunctionCallingConfig result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.aiplatform.v1.FunctionCallingConfig buildPartial() {
    com.google.cloud.aiplatform.v1.FunctionCallingConfig result =
        new com.google.cloud.aiplatform.v1.FunctionCallingConfig(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies only the fields whose presence bit is set into the new message.
  private void buildPartial0(com.google.cloud.aiplatform.v1.FunctionCallingConfig result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.mode_ = mode_;
    }
    if (((from_bitField0_ & 0x00000002) != 0)) {
      // Freeze the list so the built message can share it without copying.
      allowedFunctionNames_.makeImmutable();
      result.allowedFunctionNames_ = allowedFunctionNames_;
    }
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.aiplatform.v1.FunctionCallingConfig) {
      return mergeFrom((com.google.cloud.aiplatform.v1.FunctionCallingConfig) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  /** Merges {@code other} into this builder using standard proto3 merge semantics. */
  public Builder mergeFrom(com.google.cloud.aiplatform.v1.FunctionCallingConfig other) {
    if (other == com.google.cloud.aiplatform.v1.FunctionCallingConfig.getDefaultInstance())
      return this;
    if (other.mode_ != 0) {
      setModeValue(other.getModeValue());
    }
    if (!other.allowedFunctionNames_.isEmpty()) {
      if (allowedFunctionNames_.isEmpty()) {
        // Our list is empty: adopt the other's (immutable) list by reference.
        allowedFunctionNames_ = other.allowedFunctionNames_;
        bitField0_ |= 0x00000002;
      } else {
        // Repeated fields concatenate on merge.
        ensureAllowedFunctionNamesIsMutable();
        allowedFunctionNames_.addAll(other.allowedFunctionNames_);
      }
      onChanged();
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  /** Streaming merge: reads tag/value pairs until EOF or an end-group tag. */
  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException {
    if (extensionRegistry == null) {
      throw new java.lang.NullPointerException();
    }
    try {
      boolean done = false;
      while (!done) {
        int tag = input.readTag();
        switch (tag) {
          case 0:
            done = true;
            break;
          case 8:
            {
              mode_ = input.readEnum();
              bitField0_ |= 0x00000001;
              break;
            } // case 8
          case 18:
            {
              java.lang.String s = input.readStringRequireUtf8();
              ensureAllowedFunctionNamesIsMutable();
              allowedFunctionNames_.add(s);
              break;
            } // case 18
          default:
            {
              if (!super.parseUnknownField(input, extensionRegistry, tag)) {
                done = true; // was an endgroup tag
              }
              break;
            } // default:
        } // switch (tag)
      } // while (!done)
    } catch (com.google.protobuf.InvalidProtocolBufferException e) {
      throw e.unwrapIOException();
    } finally {
      onChanged();
    } // finally
    return this;
  }

  // Presence bits: 0x1 = mode_, 0x2 = allowedFunctionNames_.
  private int bitField0_;

  private int mode_ = 0;

  /**
   * Optional. Function calling mode.
   *
   * <code>.google.cloud.aiplatform.v1.FunctionCallingConfig.Mode mode = 1 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return The enum numeric value on the wire for mode.
   */
  @java.lang.Override
  public int getModeValue() {
    return mode_;
  }

  /**
   * Optional. Function calling mode.
   *
   * @param value The enum numeric value on the wire for mode to set.
   * @return This builder for chaining.
   */
  public Builder setModeValue(int value) {
    mode_ = value;
    bitField0_ |= 0x00000001;
    onChanged();
    return this;
  }

  /**
   * Optional. Function calling mode.
   *
   * @return The mode (UNRECOGNIZED when the wire value is unknown to this runtime).
   */
  @java.lang.Override
  public com.google.cloud.aiplatform.v1.FunctionCallingConfig.Mode getMode() {
    com.google.cloud.aiplatform.v1.FunctionCallingConfig.Mode result =
        com.google.cloud.aiplatform.v1.FunctionCallingConfig.Mode.forNumber(mode_);
    return result == null
        ? com.google.cloud.aiplatform.v1.FunctionCallingConfig.Mode.UNRECOGNIZED
        : result;
  }

  /**
   * Optional. Function calling mode.
   *
   * @param value The mode to set; must not be null.
   * @return This builder for chaining.
   */
  public Builder setMode(com.google.cloud.aiplatform.v1.FunctionCallingConfig.Mode value) {
    if (value == null) {
      throw new NullPointerException();
    }
    bitField0_ |= 0x00000001;
    mode_ = value.getNumber();
    onChanged();
    return this;
  }

  /**
   * Optional. Function calling mode.
   *
   * @return This builder for chaining.
   */
  public Builder clearMode() {
    bitField0_ = (bitField0_ & ~0x00000001);
    mode_ = 0;
    onChanged();
    return this;
  }

  private com.google.protobuf.LazyStringArrayList allowedFunctionNames_ =
      com.google.protobuf.LazyStringArrayList.emptyList();

  // Copy-on-write: replaces a shared/immutable list with a private mutable copy
  // before any mutation, and marks the field as present.
  private void ensureAllowedFunctionNamesIsMutable() {
    if (!allowedFunctionNames_.isModifiable()) {
      allowedFunctionNames_ = new com.google.protobuf.LazyStringArrayList(allowedFunctionNames_);
    }
    bitField0_ |= 0x00000002;
  }

  /**
   * Optional. Function names to call. Only set when the Mode is ANY. Function
   * names should match [FunctionDeclaration.name]. With mode set to ANY, model
   * will predict a function call from the set of function names provided.
   *
   * <code>repeated string allowed_function_names = 2 [(.google.api.field_behavior) = OPTIONAL];</code>
   *
   * @return A list containing the allowedFunctionNames (frozen before returning
   *     so callers cannot mutate builder state through it).
   */
  public com.google.protobuf.ProtocolStringList getAllowedFunctionNamesList() {
    allowedFunctionNames_.makeImmutable();
    return allowedFunctionNames_;
  }

  /**
   * @return The count of allowedFunctionNames.
   */
  public int getAllowedFunctionNamesCount() {
    return allowedFunctionNames_.size();
  }

  /**
   * @param index The index of the element to return.
   * @return The allowedFunctionNames at the given index.
   */
  public java.lang.String getAllowedFunctionNames(int index) {
    return allowedFunctionNames_.get(index);
  }

  /**
   * @param index The index of the value to return.
   * @return The bytes of the allowedFunctionNames at the given index.
   */
  public com.google.protobuf.ByteString getAllowedFunctionNamesBytes(int index) {
    return allowedFunctionNames_.getByteString(index);
  }

  /**
   * @param index The index to set the value at.
   * @param value The allowedFunctionNames to set; must not be null.
   * @return This builder for chaining.
   */
  public Builder setAllowedFunctionNames(int index, java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureAllowedFunctionNamesIsMutable();
    allowedFunctionNames_.set(index, value);
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   * @param value The allowedFunctionNames to add; must not be null.
   * @return This builder for chaining.
   */
  public Builder addAllowedFunctionNames(java.lang.String value) {
    if (value == null) {
      throw new NullPointerException();
    }
    ensureAllowedFunctionNamesIsMutable();
    allowedFunctionNames_.add(value);
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   * @param values The allowedFunctionNames to add.
   * @return This builder for chaining.
   */
  public Builder addAllAllowedFunctionNames(java.lang.Iterable<java.lang.String> values) {
    ensureAllowedFunctionNamesIsMutable();
    com.google.protobuf.AbstractMessageLite.Builder.addAll(values, allowedFunctionNames_);
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  /**
   * @return This builder for chaining.
   */
  public Builder clearAllowedFunctionNames() {
    allowedFunctionNames_ = com.google.protobuf.LazyStringArrayList.emptyList();
    bitField0_ = (bitField0_ & ~0x00000002);
    ; // NOTE(review): stray empty statement emitted by protoc; harmless.
    onChanged();
    return this;
  }

  /**
   * @param value The bytes of the allowedFunctionNames to add; must be valid UTF-8.
   * @return This builder for chaining.
   */
  public Builder addAllowedFunctionNamesBytes(com.google.protobuf.ByteString value) {
    if (value == null) {
      throw new NullPointerException();
    }
    checkByteStringIsUtf8(value);
    ensureAllowedFunctionNamesIsMutable();
    allowedFunctionNames_.add(value);
    bitField0_ |= 0x00000002;
    onChanged();
    return this;
  }

  @java.lang.Override
  public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.setUnknownFields(unknownFields);
  }

  @java.lang.Override
  public final Builder mergeUnknownFields(
      final com.google.protobuf.UnknownFieldSet unknownFields) {
    return super.mergeUnknownFields(unknownFields);
  }

  // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.FunctionCallingConfig)
}
// @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.FunctionCallingConfig)
// Shared immutable default (all-fields-unset) instance.
private static final com.google.cloud.aiplatform.v1.FunctionCallingConfig DEFAULT_INSTANCE;

static {
  DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.FunctionCallingConfig();
}

public static com.google.cloud.aiplatform.v1.FunctionCallingConfig getDefaultInstance() {
  return DEFAULT_INSTANCE;
}

// Singleton parser: parses via a fresh Builder and, on failure, attaches the
// partially parsed message to the thrown InvalidProtocolBufferException so
// callers can inspect what was read before the error.
private static final com.google.protobuf.Parser<FunctionCallingConfig> PARSER =
    new com.google.protobuf.AbstractParser<FunctionCallingConfig>() {
      @java.lang.Override
      public FunctionCallingConfig parsePartialFrom(
          com.google.protobuf.CodedInputStream input,
          com.google.protobuf.ExtensionRegistryLite extensionRegistry)
          throws com.google.protobuf.InvalidProtocolBufferException {
        Builder builder = newBuilder();
        try {
          builder.mergeFrom(input, extensionRegistry);
        } catch (com.google.protobuf.InvalidProtocolBufferException e) {
          throw e.setUnfinishedMessage(builder.buildPartial());
        } catch (com.google.protobuf.UninitializedMessageException e) {
          throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial());
        } catch (java.io.IOException e) {
          throw new com.google.protobuf.InvalidProtocolBufferException(e)
              .setUnfinishedMessage(builder.buildPartial());
        }
        return builder.buildPartial();
      }
    };

public static com.google.protobuf.Parser<FunctionCallingConfig> parser() {
  return PARSER;
}

@java.lang.Override
public com.google.protobuf.Parser<FunctionCallingConfig> getParserForType() {
  return PARSER;
}

@java.lang.Override
public com.google.cloud.aiplatform.v1.FunctionCallingConfig getDefaultInstanceForType() {
  return DEFAULT_INSTANCE;
}
}
|
apache/harmony | 37,558 | classlib/modules/security/src/test/support/common/java/org/apache/harmony/security/tests/support/TestKeyPair.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
/**
* @author Vladimir N. Molotkov
*/
package org.apache.harmony.security.tests.support;
import java.security.KeyFactory;
import java.security.NoSuchAlgorithmException;
import java.security.PrivateKey;
import java.security.PublicKey;
import java.security.spec.InvalidKeySpecException;
import java.security.spec.PKCS8EncodedKeySpec;
import java.security.spec.X509EncodedKeySpec;
import java.util.HashMap;
/**
* Generates key pairs based on their encodings for some algorithms.
* Encodings generated using
* BEA JRockit j2sdk1.4.2_04 (http://www.bea.com)
*/
public class TestKeyPair {

    // PKCS#8-encoded private keys, keyed by JCA algorithm name ("RSA", "DSA", "DH").
    // Typed map (was a raw HashMap) removes the unchecked casts in the getters.
    private static final Map<String, byte[]> privateKeyEncoding = new HashMap<String, byte[]>();

    // X.509 (SubjectPublicKeyInfo)-encoded public keys, parallel to privateKeyEncoding.
    private static final Map<String, byte[]> publicKeyEncoding = new HashMap<String, byte[]>();

    // Algorithm served by this instance; fixed at construction.
    private final String algorithmName;

    // Factory used to decode the stored encodings into key objects.
    private final KeyFactory kf;

    static {
        privateKeyEncoding.put("RSA", new byte[] {
            (byte)0x30, (byte)0x82, (byte)0x02, (byte)0x77,
            (byte)0x02, (byte)0x01, (byte)0x00, (byte)0x30,
            (byte)0x0d, (byte)0x06, (byte)0x09, (byte)0x2a,
            (byte)0x86, (byte)0x48, (byte)0x86, (byte)0xf7,
            (byte)0x0d, (byte)0x01, (byte)0x01, (byte)0x01,
            (byte)0x05, (byte)0x00, (byte)0x04, (byte)0x82,
            (byte)0x02, (byte)0x61, (byte)0x30, (byte)0x82,
            (byte)0x02, (byte)0x5d, (byte)0x02, (byte)0x01,
            (byte)0x00, (byte)0x02, (byte)0x81, (byte)0x81,
            (byte)0x00, (byte)0xb2, (byte)0x4a, (byte)0x9b,
            (byte)0x5b, (byte)0xba, (byte)0x01, (byte)0xc0,
            (byte)0xcd, (byte)0x65, (byte)0x09, (byte)0x63,
            (byte)0x70, (byte)0x0b, (byte)0x5a, (byte)0x1b,
            (byte)0x92, (byte)0x08, (byte)0xf8, (byte)0x55,
            (byte)0x5e, (byte)0x7c, (byte)0x1b, (byte)0x50,
            (byte)0x17, (byte)0xec, (byte)0x44, (byte)0x4c,
            (byte)0x58, (byte)0x42, (byte)0x2b, (byte)0x41,
            (byte)0x09, (byte)0x59, (byte)0xf2, (byte)0xe1,
            (byte)0x5d, (byte)0x43, (byte)0x71, (byte)0x4d,
            (byte)0x92, (byte)0x03, (byte)0x1d, (byte)0xb6,
            (byte)0x6c, (byte)0x7f, (byte)0x5d, (byte)0x48,
            (byte)0xcd, (byte)0x17, (byte)0xec, (byte)0xd7,
            (byte)0x4c, (byte)0x39, (byte)0xb1, (byte)0x7b,
            (byte)0xe2, (byte)0xbf, (byte)0x96, (byte)0x77,
            (byte)0xbe, (byte)0xd0, (byte)0xa0, (byte)0xf0,
            (byte)0x2d, (byte)0x6b, (byte)0x24, (byte)0xaa,
            (byte)0x14, (byte)0xba, (byte)0x82, (byte)0x79,
            (byte)0x10, (byte)0x9b, (byte)0x16, (byte)0x68,
            (byte)0x47, (byte)0x81, (byte)0x54, (byte)0xa2,
            (byte)0xfa, (byte)0x91, (byte)0x9e, (byte)0x0a,
            (byte)0x2a, (byte)0x53, (byte)0xa6, (byte)0xe7,
            (byte)0x9e, (byte)0x7d, (byte)0x29, (byte)0x33,
            (byte)0xd8, (byte)0x05, (byte)0xfc, (byte)0x02,
            (byte)0x3f, (byte)0xbd, (byte)0xc7, (byte)0x6e,
            (byte)0xed, (byte)0xaa, (byte)0x30, (byte)0x6c,
            (byte)0x5f, (byte)0x52, (byte)0xed, (byte)0x35,
            (byte)0x65, (byte)0x4b, (byte)0x0e, (byte)0xc8,
            (byte)0xa7, (byte)0x12, (byte)0x10, (byte)0x56,
            (byte)0x37, (byte)0xaf, (byte)0x11, (byte)0xfa,
            (byte)0x21, (byte)0x0e, (byte)0x99, (byte)0xff,
            (byte)0xfa, (byte)0x8c, (byte)0x65, (byte)0x8e,
            (byte)0x6d, (byte)0x02, (byte)0x03, (byte)0x01,
            (byte)0x00, (byte)0x01, (byte)0x02, (byte)0x81,
            (byte)0x80, (byte)0x78, (byte)0x41, (byte)0x72,
            (byte)0x40, (byte)0x90, (byte)0x59, (byte)0x96,
            (byte)0x5d, (byte)0xf3, (byte)0x84, (byte)0x3d,
            (byte)0x99, (byte)0xd9, (byte)0x4e, (byte)0x51,
            (byte)0xc2, (byte)0x52, (byte)0x62, (byte)0x8d,
            (byte)0xd2, (byte)0x49, (byte)0x0b, (byte)0x73,
            (byte)0x1e, (byte)0x6f, (byte)0xb2, (byte)0x31,
            (byte)0x7c, (byte)0x66, (byte)0x45, (byte)0x1e,
            (byte)0x7c, (byte)0xdc, (byte)0x3a, (byte)0xc2,
            (byte)0x5f, (byte)0x51, (byte)0x9a, (byte)0x1e,
            (byte)0xa4, (byte)0x19, (byte)0x8d, (byte)0xf4,
            (byte)0xf9, (byte)0x81, (byte)0x7e, (byte)0xbe,
            (byte)0x17, (byte)0xf7, (byte)0xc7, (byte)0x3c,
            (byte)0x00, (byte)0xa1, (byte)0xf9, (byte)0x60,
            (byte)0x82, (byte)0x34, (byte)0x8f, (byte)0x9c,
            (byte)0xfd, (byte)0x0b, (byte)0x63, (byte)0x42,
            (byte)0x1b, (byte)0x7f, (byte)0x45, (byte)0xf1,
            (byte)0x31, (byte)0xc3, (byte)0x63, (byte)0x47,
            (byte)0x5c, (byte)0xc1, (byte)0xb2, (byte)0x5f,
            (byte)0x57, (byte)0xee, (byte)0x02, (byte)0x9f,
            (byte)0x5e, (byte)0x08, (byte)0x48, (byte)0xba,
            (byte)0x74, (byte)0xba, (byte)0x81, (byte)0xb7,
            (byte)0x30, (byte)0xac, (byte)0x4c, (byte)0x01,
            (byte)0x35, (byte)0xce, (byte)0x46, (byte)0x47,
            (byte)0x8c, (byte)0xe4, (byte)0x62, (byte)0x36,
            (byte)0x1a, (byte)0x65, (byte)0x0e, (byte)0x33,
            (byte)0x56, (byte)0xf9, (byte)0xb7, (byte)0xa0,
            (byte)0xc4, (byte)0xb6, (byte)0x82, (byte)0x55,
            (byte)0x7d, (byte)0x36, (byte)0x55, (byte)0xc0,
            (byte)0x52, (byte)0x5e, (byte)0x35, (byte)0x54,
            (byte)0xbd, (byte)0x97, (byte)0x01, (byte)0x00,
            (byte)0xbf, (byte)0x10, (byte)0xdc, (byte)0x1b,
            (byte)0x51, (byte)0x02, (byte)0x41, (byte)0x00,
            (byte)0xe7, (byte)0x68, (byte)0x03, (byte)0x3e,
            (byte)0x21, (byte)0x64, (byte)0x68, (byte)0x24,
            (byte)0x7b, (byte)0xd0, (byte)0x31, (byte)0xa0,
            (byte)0xa2, (byte)0xd9, (byte)0x87, (byte)0x6d,
            (byte)0x79, (byte)0x81, (byte)0x8f, (byte)0x8f,
            (byte)0x2d, (byte)0x7a, (byte)0x95, (byte)0x2e,
            (byte)0x55, (byte)0x9f, (byte)0xd7, (byte)0x86,
            (byte)0x29, (byte)0x93, (byte)0xbd, (byte)0x04,
            (byte)0x7e, (byte)0x4f, (byte)0xdb, (byte)0x56,
            (byte)0xf1, (byte)0x75, (byte)0xd0, (byte)0x4b,
            (byte)0x00, (byte)0x3a, (byte)0xe0, (byte)0x26,
            (byte)0xf6, (byte)0xab, (byte)0x9e, (byte)0x0b,
            (byte)0x2a, (byte)0xf4, (byte)0xa8, (byte)0xd7,
            (byte)0xff, (byte)0xbe, (byte)0x01, (byte)0xeb,
            (byte)0x9b, (byte)0x81, (byte)0xc7, (byte)0x5f,
            (byte)0x02, (byte)0x73, (byte)0xe1, (byte)0x2b,
            (byte)0x02, (byte)0x41, (byte)0x00, (byte)0xc5,
            (byte)0x3d, (byte)0x78, (byte)0xab, (byte)0xe6,
            (byte)0xab, (byte)0x3e, (byte)0x29, (byte)0xfd,
            (byte)0x98, (byte)0xd0, (byte)0xa4, (byte)0x3e,
            (byte)0x58, (byte)0xee, (byte)0x48, (byte)0x45,
            (byte)0xa3, (byte)0x66, (byte)0xac, (byte)0xe9,
            (byte)0x4d, (byte)0xbd, (byte)0x60, (byte)0xea,
            (byte)0x24, (byte)0xff, (byte)0xed, (byte)0x0c,
            (byte)0x67, (byte)0xc5, (byte)0xfd, (byte)0x36,
            (byte)0x28, (byte)0xea, (byte)0x74, (byte)0x88,
            (byte)0xd1, (byte)0xd1, (byte)0xad, (byte)0x58,
            (byte)0xd7, (byte)0xf0, (byte)0x67, (byte)0x20,
            (byte)0xc1, (byte)0xe3, (byte)0xb3, (byte)0xdb,
            (byte)0x52, (byte)0xad, (byte)0xf3, (byte)0xc4,
            (byte)0x21, (byte)0xd8, (byte)0x8c, (byte)0x4c,
            (byte)0x41, (byte)0x27, (byte)0xdb, (byte)0xd0,
            (byte)0x35, (byte)0x92, (byte)0xc7, (byte)0x02,
            (byte)0x41, (byte)0x00, (byte)0xe0, (byte)0x99,
            (byte)0x42, (byte)0xb4, (byte)0x76, (byte)0x02,
            (byte)0x97, (byte)0x55, (byte)0xf9, (byte)0xda,
            (byte)0x3b, (byte)0xa0, (byte)0xd7, (byte)0x0e,
            (byte)0xdc, (byte)0xf4, (byte)0x33, (byte)0x7f,
            (byte)0xbd, (byte)0xcf, (byte)0xd0, (byte)0xeb,
            (byte)0x6e, (byte)0x89, (byte)0xf7, (byte)0x4f,
            (byte)0x5a, (byte)0x07, (byte)0x7c, (byte)0xa9,
            (byte)0x49, (byte)0x47, (byte)0x68, (byte)0x35,
            (byte)0xa8, (byte)0x05, (byte)0x3d, (byte)0xfd,
            (byte)0x04, (byte)0x7b, (byte)0x17, (byte)0x31,
            (byte)0x0d, (byte)0xc8, (byte)0xa3, (byte)0x98,
            (byte)0x34, (byte)0xa0, (byte)0x50, (byte)0x44,
            (byte)0x00, (byte)0xf1, (byte)0x0c, (byte)0xe6,
            (byte)0xe5, (byte)0xc4, (byte)0x41, (byte)0x3d,
            (byte)0xf8, (byte)0x3d, (byte)0x4e, (byte)0x0b,
            (byte)0x1c, (byte)0xdb, (byte)0x02, (byte)0x41,
            (byte)0x00, (byte)0x82, (byte)0x9b, (byte)0x8a,
            (byte)0xfd, (byte)0xa1, (byte)0x98, (byte)0x41,
            (byte)0x68, (byte)0xc2, (byte)0xd1, (byte)0xdf,
            (byte)0x4e, (byte)0xf3, (byte)0x2e, (byte)0x26,
            (byte)0x53, (byte)0x5b, (byte)0x31, (byte)0xb1,
            (byte)0x7a, (byte)0xcc, (byte)0x5e, (byte)0xbb,
            (byte)0x09, (byte)0xa2, (byte)0xe2, (byte)0x6f,
            (byte)0x4a, (byte)0x04, (byte)0x0d, (byte)0xef,
            (byte)0x90, (byte)0x15, (byte)0xbe, (byte)0x10,
            (byte)0x4a, (byte)0xac, (byte)0x92, (byte)0xeb,
            (byte)0xda, (byte)0x72, (byte)0xdb, (byte)0x43,
            (byte)0x08, (byte)0xb7, (byte)0x2b, (byte)0x4c,
            (byte)0xe1, (byte)0xbb, (byte)0x58, (byte)0xcb,
            (byte)0x71, (byte)0x80, (byte)0xad, (byte)0xbc,
            (byte)0xdc, (byte)0x62, (byte)0x5e, (byte)0x3e,
            (byte)0xcb, (byte)0x92, (byte)0xda, (byte)0xf6,
            (byte)0xdf, (byte)0x02, (byte)0x40, (byte)0x4d,
            (byte)0x81, (byte)0x90, (byte)0xc5, (byte)0x77,
            (byte)0x30, (byte)0xb7, (byte)0x29, (byte)0x00,
            (byte)0xa8, (byte)0xf1, (byte)0xb4, (byte)0xae,
            (byte)0x52, (byte)0x63, (byte)0x00, (byte)0xb2,
            (byte)0x2d, (byte)0x3e, (byte)0x7d, (byte)0xd6,
            (byte)0x4d, (byte)0xf9, (byte)0x8a, (byte)0xc1,
            (byte)0xb1, (byte)0x98, (byte)0x89, (byte)0x52,
            (byte)0x40, (byte)0x14, (byte)0x1b, (byte)0x0e,
            (byte)0x61, (byte)0x8f, (byte)0xf4, (byte)0xbe,
            (byte)0x59, (byte)0x79, (byte)0x79, (byte)0x95,
            (byte)0x19, (byte)0x5c, (byte)0x51, (byte)0x08,
            (byte)0x66, (byte)0xc1, (byte)0x42, (byte)0x30,
            (byte)0xb3, (byte)0x7a, (byte)0x86, (byte)0x9f,
            (byte)0x3e, (byte)0xf5, (byte)0x19, (byte)0xa3,
            (byte)0xae, (byte)0x64, (byte)0x69, (byte)0x14,
            (byte)0x07, (byte)0x50, (byte)0x97
        });
        publicKeyEncoding.put("RSA", new byte[] {
            (byte)0x30, (byte)0x81, (byte)0x9f, (byte)0x30,
            (byte)0x0d, (byte)0x06, (byte)0x09, (byte)0x2a,
            (byte)0x86, (byte)0x48, (byte)0x86, (byte)0xf7,
            (byte)0x0d, (byte)0x01, (byte)0x01, (byte)0x01,
            (byte)0x05, (byte)0x00, (byte)0x03, (byte)0x81,
            (byte)0x8d, (byte)0x00, (byte)0x30, (byte)0x81,
            (byte)0x89, (byte)0x02, (byte)0x81, (byte)0x81,
            (byte)0x00, (byte)0xb2, (byte)0x4a, (byte)0x9b,
            (byte)0x5b, (byte)0xba, (byte)0x01, (byte)0xc0,
            (byte)0xcd, (byte)0x65, (byte)0x09, (byte)0x63,
            (byte)0x70, (byte)0x0b, (byte)0x5a, (byte)0x1b,
            (byte)0x92, (byte)0x08, (byte)0xf8, (byte)0x55,
            (byte)0x5e, (byte)0x7c, (byte)0x1b, (byte)0x50,
            (byte)0x17, (byte)0xec, (byte)0x44, (byte)0x4c,
            (byte)0x58, (byte)0x42, (byte)0x2b, (byte)0x41,
            (byte)0x09, (byte)0x59, (byte)0xf2, (byte)0xe1,
            (byte)0x5d, (byte)0x43, (byte)0x71, (byte)0x4d,
            (byte)0x92, (byte)0x03, (byte)0x1d, (byte)0xb6,
            (byte)0x6c, (byte)0x7f, (byte)0x5d, (byte)0x48,
            (byte)0xcd, (byte)0x17, (byte)0xec, (byte)0xd7,
            (byte)0x4c, (byte)0x39, (byte)0xb1, (byte)0x7b,
            (byte)0xe2, (byte)0xbf, (byte)0x96, (byte)0x77,
            (byte)0xbe, (byte)0xd0, (byte)0xa0, (byte)0xf0,
            (byte)0x2d, (byte)0x6b, (byte)0x24, (byte)0xaa,
            (byte)0x14, (byte)0xba, (byte)0x82, (byte)0x79,
            (byte)0x10, (byte)0x9b, (byte)0x16, (byte)0x68,
            (byte)0x47, (byte)0x81, (byte)0x54, (byte)0xa2,
            (byte)0xfa, (byte)0x91, (byte)0x9e, (byte)0x0a,
            (byte)0x2a, (byte)0x53, (byte)0xa6, (byte)0xe7,
            (byte)0x9e, (byte)0x7d, (byte)0x29, (byte)0x33,
            (byte)0xd8, (byte)0x05, (byte)0xfc, (byte)0x02,
            (byte)0x3f, (byte)0xbd, (byte)0xc7, (byte)0x6e,
            (byte)0xed, (byte)0xaa, (byte)0x30, (byte)0x6c,
            (byte)0x5f, (byte)0x52, (byte)0xed, (byte)0x35,
            (byte)0x65, (byte)0x4b, (byte)0x0e, (byte)0xc8,
            (byte)0xa7, (byte)0x12, (byte)0x10, (byte)0x56,
            (byte)0x37, (byte)0xaf, (byte)0x11, (byte)0xfa,
            (byte)0x21, (byte)0x0e, (byte)0x99, (byte)0xff,
            (byte)0xfa, (byte)0x8c, (byte)0x65, (byte)0x8e,
            (byte)0x6d, (byte)0x02, (byte)0x03, (byte)0x01,
            (byte)0x00, (byte)0x01
        });
        privateKeyEncoding.put("DSA", new byte[] {
            (byte)0x30, (byte)0x82, (byte)0x01, (byte)0x4a,
            (byte)0x02, (byte)0x01, (byte)0x00, (byte)0x30,
            (byte)0x82, (byte)0x01, (byte)0x2b, (byte)0x06,
            (byte)0x07, (byte)0x2a, (byte)0x86, (byte)0x48,
            (byte)0xce, (byte)0x38, (byte)0x04, (byte)0x01,
            (byte)0x30, (byte)0x82, (byte)0x01, (byte)0x1e,
            (byte)0x02, (byte)0x81, (byte)0x81, (byte)0x00,
            (byte)0xca, (byte)0x84, (byte)0x1d, (byte)0xa3,
            (byte)0xab, (byte)0xb9, (byte)0x98, (byte)0xf4,
            (byte)0x61, (byte)0x8b, (byte)0x66, (byte)0xdb,
            (byte)0x4e, (byte)0x3a, (byte)0xb2, (byte)0x11,
            (byte)0x4e, (byte)0xa9, (byte)0xda, (byte)0x35,
            (byte)0x91, (byte)0xc9, (byte)0x4e, (byte)0xc3,
            (byte)0x16, (byte)0xa7, (byte)0xed, (byte)0xb8,
            (byte)0x8f, (byte)0xd7, (byte)0xea, (byte)0xea,
            (byte)0xdb, (byte)0x77, (byte)0xe1, (byte)0x77,
            (byte)0x7a, (byte)0xc9, (byte)0xf3, (byte)0x37,
            (byte)0x33, (byte)0x01, (byte)0x72, (byte)0xbc,
            (byte)0xd0, (byte)0x89, (byte)0x9b, (byte)0x18,
            (byte)0xfd, (byte)0x84, (byte)0xd6, (byte)0xe9,
            (byte)0xbf, (byte)0x13, (byte)0x35, (byte)0x5e,
            (byte)0x40, (byte)0xf6, (byte)0x9d, (byte)0xd9,
            (byte)0x1a, (byte)0xba, (byte)0xa9, (byte)0xc3,
            (byte)0x8c, (byte)0xe3, (byte)0x95, (byte)0xc8,
            (byte)0xdf, (byte)0x2e, (byte)0x41, (byte)0xa1,
            (byte)0xbf, (byte)0xde, (byte)0x5d, (byte)0xad,
            (byte)0x21, (byte)0xcc, (byte)0x0d, (byte)0x42,
            (byte)0x56, (byte)0xa0, (byte)0x32, (byte)0xc0,
            (byte)0x90, (byte)0x73, (byte)0x3e, (byte)0xa4,
            (byte)0x0e, (byte)0x58, (byte)0xe4, (byte)0x64,
            (byte)0x00, (byte)0xa3, (byte)0x27, (byte)0x49,
            (byte)0x56, (byte)0xb2, (byte)0x43, (byte)0xbc,
            (byte)0x72, (byte)0xa8, (byte)0xd2, (byte)0x26,
            (byte)0x89, (byte)0x35, (byte)0x37, (byte)0x29,
            (byte)0x8d, (byte)0x21, (byte)0xb5, (byte)0x8e,
            (byte)0x59, (byte)0xfa, (byte)0x9e, (byte)0xdf,
            (byte)0x37, (byte)0x0d, (byte)0x9e, (byte)0xab,
            (byte)0xfd, (byte)0xbf, (byte)0x1a, (byte)0x9e,
            (byte)0xf3, (byte)0xe8, (byte)0x3a, (byte)0xfb,
            (byte)0x02, (byte)0x15, (byte)0x00, (byte)0xa2,
            (byte)0x4e, (byte)0x5d, (byte)0xe3, (byte)0x10,
            (byte)0x5d, (byte)0xa9, (byte)0x3a, (byte)0x6a,
            (byte)0x4d, (byte)0x07, (byte)0x3b, (byte)0xab,
            (byte)0xca, (byte)0x7d, (byte)0x09, (byte)0xd6,
            (byte)0x06, (byte)0x79, (byte)0x49, (byte)0x02,
            (byte)0x81, (byte)0x80, (byte)0x5a, (byte)0x91,
            (byte)0x83, (byte)0x1c, (byte)0x04, (byte)0x33,
            (byte)0xca, (byte)0x25, (byte)0xb0, (byte)0x68,
            (byte)0xb3, (byte)0xb3, (byte)0xab, (byte)0x55,
            (byte)0x29, (byte)0x33, (byte)0x4d, (byte)0xa9,
            (byte)0x33, (byte)0x39, (byte)0xef, (byte)0x71,
            (byte)0xca, (byte)0x95, (byte)0xf3, (byte)0xd8,
            (byte)0x27, (byte)0x56, (byte)0x5f, (byte)0x42,
            (byte)0xda, (byte)0x36, (byte)0x83, (byte)0xc5,
            (byte)0xf1, (byte)0x53, (byte)0x62, (byte)0xa5,
            (byte)0xdc, (byte)0xe6, (byte)0x4e, (byte)0x69,
            (byte)0x45, (byte)0x71, (byte)0x1a, (byte)0x4a,
            (byte)0xc3, (byte)0xf4, (byte)0x7f, (byte)0x0a,
            (byte)0xd1, (byte)0x78, (byte)0xed, (byte)0xbe,
            (byte)0x6e, (byte)0xa6, (byte)0x36, (byte)0x34,
            (byte)0x4e, (byte)0xc3, (byte)0x1b, (byte)0x17,
            (byte)0xaa, (byte)0xa4, (byte)0x76, (byte)0x44,
            (byte)0x46, (byte)0xaf, (byte)0x26, (byte)0x16,
            (byte)0x14, (byte)0xfb, (byte)0x9f, (byte)0x5d,
            (byte)0x08, (byte)0xaf, (byte)0x92, (byte)0xdb,
            (byte)0xba, (byte)0xd0, (byte)0xcb, (byte)0x8b,
            (byte)0x1e, (byte)0xc3, (byte)0x8b, (byte)0x36,
            (byte)0x3b, (byte)0x4c, (byte)0x02, (byte)0xc3,
            (byte)0x66, (byte)0x28, (byte)0x69, (byte)0xd0,
            (byte)0x74, (byte)0x4f, (byte)0x1c, (byte)0x4f,
            (byte)0x97, (byte)0x75, (byte)0x7f, (byte)0x9e,
            (byte)0x89, (byte)0x80, (byte)0xcf, (byte)0xb2,
            (byte)0x17, (byte)0xd6, (byte)0x66, (byte)0x91,
            (byte)0x12, (byte)0x3a, (byte)0xb0, (byte)0x3c,
            (byte)0x3c, (byte)0xc2, (byte)0x31, (byte)0xd1,
            (byte)0x31, (byte)0x2a, (byte)0x35, (byte)0xbe,
            (byte)0x9d, (byte)0x54, (byte)0x71, (byte)0x03,
            (byte)0xcb, (byte)0xcc, (byte)0x04, (byte)0x16,
            (byte)0x02, (byte)0x14, (byte)0x52, (byte)0xfb,
            (byte)0xf9, (byte)0x12, (byte)0x40, (byte)0x05,
            (byte)0x59, (byte)0x8f, (byte)0xde, (byte)0x9d,
            (byte)0xac, (byte)0xa1, (byte)0xe2, (byte)0xed,
            (byte)0x56, (byte)0x62, (byte)0x5f, (byte)0x56,
            (byte)0x67, (byte)0x74
        });
        publicKeyEncoding.put("DSA", new byte[] {
            (byte)0x30, (byte)0x82, (byte)0x01, (byte)0xb7,
            (byte)0x30, (byte)0x82, (byte)0x01, (byte)0x2b,
            (byte)0x06, (byte)0x07, (byte)0x2a, (byte)0x86,
            (byte)0x48, (byte)0xce, (byte)0x38, (byte)0x04,
            (byte)0x01, (byte)0x30, (byte)0x82, (byte)0x01,
            (byte)0x1e, (byte)0x02, (byte)0x81, (byte)0x81,
            (byte)0x00, (byte)0xca, (byte)0x84, (byte)0x1d,
            (byte)0xa3, (byte)0xab, (byte)0xb9, (byte)0x98,
            (byte)0xf4, (byte)0x61, (byte)0x8b, (byte)0x66,
            (byte)0xdb, (byte)0x4e, (byte)0x3a, (byte)0xb2,
            (byte)0x11, (byte)0x4e, (byte)0xa9, (byte)0xda,
            (byte)0x35, (byte)0x91, (byte)0xc9, (byte)0x4e,
            (byte)0xc3, (byte)0x16, (byte)0xa7, (byte)0xed,
            (byte)0xb8, (byte)0x8f, (byte)0xd7, (byte)0xea,
            (byte)0xea, (byte)0xdb, (byte)0x77, (byte)0xe1,
            (byte)0x77, (byte)0x7a, (byte)0xc9, (byte)0xf3,
            (byte)0x37, (byte)0x33, (byte)0x01, (byte)0x72,
            (byte)0xbc, (byte)0xd0, (byte)0x89, (byte)0x9b,
            (byte)0x18, (byte)0xfd, (byte)0x84, (byte)0xd6,
            (byte)0xe9, (byte)0xbf, (byte)0x13, (byte)0x35,
            (byte)0x5e, (byte)0x40, (byte)0xf6, (byte)0x9d,
            (byte)0xd9, (byte)0x1a, (byte)0xba, (byte)0xa9,
            (byte)0xc3, (byte)0x8c, (byte)0xe3, (byte)0x95,
            (byte)0xc8, (byte)0xdf, (byte)0x2e, (byte)0x41,
            (byte)0xa1, (byte)0xbf, (byte)0xde, (byte)0x5d,
            (byte)0xad, (byte)0x21, (byte)0xcc, (byte)0x0d,
            (byte)0x42, (byte)0x56, (byte)0xa0, (byte)0x32,
            (byte)0xc0, (byte)0x90, (byte)0x73, (byte)0x3e,
            (byte)0xa4, (byte)0x0e, (byte)0x58, (byte)0xe4,
            (byte)0x64, (byte)0x00, (byte)0xa3, (byte)0x27,
            (byte)0x49, (byte)0x56, (byte)0xb2, (byte)0x43,
            (byte)0xbc, (byte)0x72, (byte)0xa8, (byte)0xd2,
            (byte)0x26, (byte)0x89, (byte)0x35, (byte)0x37,
            (byte)0x29, (byte)0x8d, (byte)0x21, (byte)0xb5,
            (byte)0x8e, (byte)0x59, (byte)0xfa, (byte)0x9e,
            (byte)0xdf, (byte)0x37, (byte)0x0d, (byte)0x9e,
            (byte)0xab, (byte)0xfd, (byte)0xbf, (byte)0x1a,
            (byte)0x9e, (byte)0xf3, (byte)0xe8, (byte)0x3a,
            (byte)0xfb, (byte)0x02, (byte)0x15, (byte)0x00,
            (byte)0xa2, (byte)0x4e, (byte)0x5d, (byte)0xe3,
            (byte)0x10, (byte)0x5d, (byte)0xa9, (byte)0x3a,
            (byte)0x6a, (byte)0x4d, (byte)0x07, (byte)0x3b,
            (byte)0xab, (byte)0xca, (byte)0x7d, (byte)0x09,
            (byte)0xd6, (byte)0x06, (byte)0x79, (byte)0x49,
            (byte)0x02, (byte)0x81, (byte)0x80, (byte)0x5a,
            (byte)0x91, (byte)0x83, (byte)0x1c, (byte)0x04,
            (byte)0x33, (byte)0xca, (byte)0x25, (byte)0xb0,
            (byte)0x68, (byte)0xb3, (byte)0xb3, (byte)0xab,
            (byte)0x55, (byte)0x29, (byte)0x33, (byte)0x4d,
            (byte)0xa9, (byte)0x33, (byte)0x39, (byte)0xef,
            (byte)0x71, (byte)0xca, (byte)0x95, (byte)0xf3,
            (byte)0xd8, (byte)0x27, (byte)0x56, (byte)0x5f,
            (byte)0x42, (byte)0xda, (byte)0x36, (byte)0x83,
            (byte)0xc5, (byte)0xf1, (byte)0x53, (byte)0x62,
            (byte)0xa5, (byte)0xdc, (byte)0xe6, (byte)0x4e,
            (byte)0x69, (byte)0x45, (byte)0x71, (byte)0x1a,
            (byte)0x4a, (byte)0xc3, (byte)0xf4, (byte)0x7f,
            (byte)0x0a, (byte)0xd1, (byte)0x78, (byte)0xed,
            (byte)0xbe, (byte)0x6e, (byte)0xa6, (byte)0x36,
            (byte)0x34, (byte)0x4e, (byte)0xc3, (byte)0x1b,
            (byte)0x17, (byte)0xaa, (byte)0xa4, (byte)0x76,
            (byte)0x44, (byte)0x46, (byte)0xaf, (byte)0x26,
            (byte)0x16, (byte)0x14, (byte)0xfb, (byte)0x9f,
            (byte)0x5d, (byte)0x08, (byte)0xaf, (byte)0x92,
            (byte)0xdb, (byte)0xba, (byte)0xd0, (byte)0xcb,
            (byte)0x8b, (byte)0x1e, (byte)0xc3, (byte)0x8b,
            (byte)0x36, (byte)0x3b, (byte)0x4c, (byte)0x02,
            (byte)0xc3, (byte)0x66, (byte)0x28, (byte)0x69,
            (byte)0xd0, (byte)0x74, (byte)0x4f, (byte)0x1c,
            (byte)0x4f, (byte)0x97, (byte)0x75, (byte)0x7f,
            (byte)0x9e, (byte)0x89, (byte)0x80, (byte)0xcf,
            (byte)0xb2, (byte)0x17, (byte)0xd6, (byte)0x66,
            (byte)0x91, (byte)0x12, (byte)0x3a, (byte)0xb0,
            (byte)0x3c, (byte)0x3c, (byte)0xc2, (byte)0x31,
            (byte)0xd1, (byte)0x31, (byte)0x2a, (byte)0x35,
            (byte)0xbe, (byte)0x9d, (byte)0x54, (byte)0x71,
            (byte)0x03, (byte)0xcb, (byte)0xcc, (byte)0x03,
            (byte)0x81, (byte)0x85, (byte)0x00, (byte)0x02,
            (byte)0x81, (byte)0x81, (byte)0x00, (byte)0x95,
            (byte)0xcc, (byte)0x11, (byte)0xd4, (byte)0x53,
            (byte)0x3d, (byte)0x9c, (byte)0x5c, (byte)0x73,
            (byte)0xf4, (byte)0x70, (byte)0xf0, (byte)0xe1,
            (byte)0xac, (byte)0xe3, (byte)0x2c, (byte)0x32,
            (byte)0x16, (byte)0x1d, (byte)0x34, (byte)0x1a,
            (byte)0x38, (byte)0x63, (byte)0x69, (byte)0x1a,
            (byte)0x72, (byte)0x39, (byte)0x4e, (byte)0x41,
            (byte)0x50, (byte)0xfa, (byte)0xdc, (byte)0x78,
            (byte)0xa4, (byte)0xb8, (byte)0x17, (byte)0x5a,
            (byte)0xe4, (byte)0xf9, (byte)0xa2, (byte)0x52,
            (byte)0x41, (byte)0x85, (byte)0xab, (byte)0x3f,
            (byte)0xf4, (byte)0x73, (byte)0x2e, (byte)0xae,
            (byte)0xa9, (byte)0x21, (byte)0x8b, (byte)0x5e,
            (byte)0x95, (byte)0x15, (byte)0xa2, (byte)0x86,
            (byte)0x63, (byte)0x0d, (byte)0xba, (byte)0x01,
            (byte)0xcb, (byte)0xe3, (byte)0x68, (byte)0xc6,
            (byte)0xaf, (byte)0x56, (byte)0x51, (byte)0x7b,
            (byte)0xa8, (byte)0x85, (byte)0x3f, (byte)0x01,
            (byte)0x80, (byte)0x8b, (byte)0x1f, (byte)0xb4,
            (byte)0x4c, (byte)0x93, (byte)0x6b, (byte)0x42,
            (byte)0xa6, (byte)0xbd, (byte)0x67, (byte)0x2a,
            (byte)0x95, (byte)0x05, (byte)0xff, (byte)0x03,
            (byte)0x2e, (byte)0x6f, (byte)0xd4, (byte)0xd3,
            (byte)0xf0, (byte)0x17, (byte)0xde, (byte)0xcb,
            (byte)0x7d, (byte)0xd9, (byte)0x42, (byte)0x4d,
            (byte)0x97, (byte)0x2c, (byte)0x53, (byte)0xe6,
            (byte)0x39, (byte)0x61, (byte)0xd2, (byte)0x69,
            (byte)0xd1, (byte)0x1c, (byte)0x9a, (byte)0x8b,
            (byte)0x5b, (byte)0x9c, (byte)0xfa, (byte)0xfa,
            (byte)0x50, (byte)0x50, (byte)0xbb, (byte)0xe4,
            (byte)0x2e, (byte)0x83, (byte)0x06, (byte)0x08,
            (byte)0x96, (byte)0x2a, (byte)0x68
        });
        // DH encodings below originally spelled negative bytes as (byte) 0xffffffNN;
        // normalized here to the equivalent (byte) 0xNN form (identical values).
        privateKeyEncoding.put("DH", new byte[] {
            (byte) 0x30, (byte) 0x81, (byte) 0xe1, (byte) 0x2,
            (byte) 0x1, (byte) 0x0, (byte) 0x30, (byte) 0x81,
            (byte) 0x97, (byte) 0x6, (byte) 0x9, (byte) 0x2a,
            (byte) 0x86, (byte) 0x48, (byte) 0x86,
            (byte) 0xf7, (byte) 0xd, (byte) 0x1, (byte) 0x3,
            (byte) 0x1, (byte) 0x30, (byte) 0x81, (byte) 0x89,
            (byte) 0x2, (byte) 0x41, (byte) 0x0, (byte) 0xf0,
            (byte) 0xaa, (byte) 0x22, (byte) 0x5a, (byte) 0x29,
            (byte) 0xb2, (byte) 0x3f, (byte) 0xc9, (byte) 0xb,
            (byte) 0x87, (byte) 0x5d, (byte) 0x91, (byte) 0x51,
            (byte) 0x1, (byte) 0xa4, (byte) 0xb9, (byte) 0x4e,
            (byte) 0x1e, (byte) 0x85, (byte) 0xfc,
            (byte) 0xa6, (byte) 0x5a, (byte) 0x96,
            (byte) 0xb1, (byte) 0xcb, (byte) 0x81,
            (byte) 0xa3, (byte) 0x6e, (byte) 0x90,
            (byte) 0xbd, (byte) 0xa2, (byte) 0xe,
            (byte) 0xb4, (byte) 0xba, (byte) 0x2c, (byte) 0x45,
            (byte) 0x9, (byte) 0x1c, (byte) 0x98, (byte) 0x39,
            (byte) 0x26, (byte) 0x24, (byte) 0x40, (byte) 0x80,
            (byte) 0xce, (byte) 0x15, (byte) 0x8b,
            (byte) 0xe1, (byte) 0x67, (byte) 0x48, (byte) 0xf3,
            (byte) 0x70, (byte) 0x98, (byte) 0xca,
            (byte) 0xa7, (byte) 0x71, (byte) 0x33, (byte) 0xb6,
            (byte) 0x4, (byte) 0x13, (byte) 0xe5, (byte) 0x61,
            (byte) 0x3c, (byte) 0x1f, (byte) 0x2, (byte) 0x40, (byte) 0x1e,
            (byte) 0xd8, (byte) 0x6f, (byte) 0xce, (byte) 0x23,
            (byte) 0x71, (byte) 0x6a, (byte) 0x2a, (byte) 0xa3,
            (byte) 0x4d, (byte) 0x62, (byte) 0xe9, (byte) 0x5f,
            (byte) 0x17, (byte) 0xa8, (byte) 0xe8,
            (byte) 0xaa, (byte) 0x8a, (byte) 0x95,
            (byte) 0x26, (byte) 0x7c, (byte) 0x38, (byte) 0xa9,
            (byte) 0x2b, (byte) 0x48, (byte) 0x5a, (byte) 0x16,
            (byte) 0x19, (byte) 0xfa, (byte) 0x83,
            (byte) 0xb8, (byte) 0x76, (byte) 0xaf,
            (byte) 0xb8, (byte) 0x62, (byte) 0x72, (byte) 0x45,
            (byte) 0x9f, (byte) 0x95, (byte) 0x1e, (byte) 0x62,
            (byte) 0x36, (byte) 0x97, (byte) 0xbf,
            (byte) 0xab, (byte) 0x20, (byte) 0xb0, (byte) 0x61,
            (byte) 0xc5, (byte) 0x21, (byte) 0x9e,
            (byte) 0xe4, (byte) 0xde, (byte) 0x91,
            (byte) 0x1c, (byte) 0x6a, (byte) 0x7, (byte) 0x48, (byte) 0x77,
            (byte) 0x70, (byte) 0x1d, (byte) 0xff, (byte) 0x58,
            (byte) 0x23, (byte) 0x2, (byte) 0x2, (byte) 0x1,
            (byte) 0xff, (byte) 0x4, (byte) 0x42, (byte) 0x2,
            (byte) 0x40, (byte) 0x69, (byte) 0x86, (byte) 0x48,
            (byte) 0x57, (byte) 0xbf, (byte) 0xde, (byte) 0x8,
            (byte) 0xc6, (byte) 0x24, (byte) 0x6d, (byte) 0xf,
            (byte) 0x20, (byte) 0x94, (byte) 0x4a, (byte) 0x22,
            (byte) 0x6e, (byte) 0x24, (byte) 0x60, (byte) 0xd9,
            (byte) 0xa9, (byte) 0xbd, (byte) 0x1e, (byte) 0x64,
            (byte) 0x89, (byte) 0x83, (byte) 0x3c,
            (byte) 0xe7, (byte) 0x70, (byte) 0x24, (byte) 0xe1,
            (byte) 0x8f, (byte) 0x3c, (byte) 0x4d, (byte) 0x39,
            (byte) 0x5f, (byte) 0x9e, (byte) 0x93, (byte) 0x13,
            (byte) 0x86, (byte) 0xe9, (byte) 0x80,
            (byte) 0xf, (byte) 0xc4, (byte) 0x41, (byte) 0x8b,
            (byte) 0xf4, (byte) 0x8b, (byte) 0x65,
            (byte) 0xa4, (byte) 0x1b, (byte) 0xd, (byte) 0x4,
            (byte) 0x48, (byte) 0x40, (byte) 0xd6, (byte) 0xa2,
            (byte) 0x0, (byte) 0x85, (byte) 0xe9,
            (byte) 0xc4, (byte) 0x77, (byte) 0xb2, (byte) 0x25,
            (byte) 0xd8
        });
        publicKeyEncoding.put("DH", new byte[] {
            (byte) 0x30, (byte) 0x81, (byte) 0xe0, (byte) 0x30,
            (byte) 0x81, (byte) 0x97, (byte) 0x6, (byte) 0x9,
            (byte) 0x2a, (byte) 0x86, (byte) 0x48, (byte) 0x86,
            (byte) 0xf7, (byte) 0xd, (byte) 0x1, (byte) 0x3,
            (byte) 0x1, (byte) 0x30, (byte) 0x81, (byte) 0x89,
            (byte) 0x2, (byte) 0x41, (byte) 0x0, (byte) 0xf0,
            (byte) 0xaa, (byte) 0x22, (byte) 0x5a, (byte) 0x29,
            (byte) 0xb2, (byte) 0x3f, (byte) 0xc9, (byte) 0xb,
            (byte) 0x87, (byte) 0x5d, (byte) 0x91, (byte) 0x51,
            (byte) 0x1, (byte) 0xa4, (byte) 0xb9, (byte) 0x4e,
            (byte) 0x1e, (byte) 0x85, (byte) 0xfc,
            (byte) 0xa6, (byte) 0x5a, (byte) 0x96,
            (byte) 0xb1, (byte) 0xcb, (byte) 0x81,
            (byte) 0xa3, (byte) 0x6e, (byte) 0x90,
            (byte) 0xbd, (byte) 0xa2, (byte) 0xe,
            (byte) 0xb4, (byte) 0xba, (byte) 0x2c, (byte) 0x45,
            (byte) 0x9, (byte) 0x1c, (byte) 0x98, (byte) 0x39,
            (byte) 0x26, (byte) 0x24, (byte) 0x40, (byte) 0x80,
            (byte) 0xce, (byte) 0x15, (byte) 0x8b,
            (byte) 0xe1, (byte) 0x67, (byte) 0x48, (byte) 0xf3,
            (byte) 0x70, (byte) 0x98, (byte) 0xca,
            (byte) 0xa7, (byte) 0x71, (byte) 0x33, (byte) 0xb6,
            (byte) 0x4, (byte) 0x13, (byte) 0xe5, (byte) 0x61,
            (byte) 0x3c, (byte) 0x1f, (byte) 0x2, (byte) 0x40, (byte) 0x1e,
            (byte) 0xd8, (byte) 0x6f, (byte) 0xce, (byte) 0x23,
            (byte) 0x71, (byte) 0x6a, (byte) 0x2a, (byte) 0xa3,
            (byte) 0x4d, (byte) 0x62, (byte) 0xe9, (byte) 0x5f,
            (byte) 0x17, (byte) 0xa8, (byte) 0xe8,
            (byte) 0xaa, (byte) 0x8a, (byte) 0x95,
            (byte) 0x26, (byte) 0x7c, (byte) 0x38, (byte) 0xa9,
            (byte) 0x2b, (byte) 0x48, (byte) 0x5a, (byte) 0x16,
            (byte) 0x19, (byte) 0xfa, (byte) 0x83,
            (byte) 0xb8, (byte) 0x76, (byte) 0xaf,
            (byte) 0xb8, (byte) 0x62, (byte) 0x72, (byte) 0x45,
            (byte) 0x9f, (byte) 0x95, (byte) 0x1e, (byte) 0x62,
            (byte) 0x36, (byte) 0x97, (byte) 0xbf,
            (byte) 0xab, (byte) 0x20, (byte) 0xb0, (byte) 0x61,
            (byte) 0xc5, (byte) 0x21, (byte) 0x9e,
            (byte) 0xe4, (byte) 0xde, (byte) 0x91,
            (byte) 0x1c, (byte) 0x6a, (byte) 0x7, (byte) 0x48, (byte) 0x77,
            (byte) 0x70, (byte) 0x1d, (byte) 0xff, (byte) 0x58,
            (byte) 0x23, (byte) 0x2, (byte) 0x2, (byte) 0x1,
            (byte) 0xff, (byte) 0x3, (byte) 0x44, (byte) 0x0,
            (byte) 0x2, (byte) 0x41, (byte) 0x0, (byte) 0x9d,
            (byte) 0xc4, (byte) 0xcd, (byte) 0x10,
            (byte) 0xdf, (byte) 0x66, (byte) 0x92,
            (byte) 0xe1, (byte) 0x33, (byte) 0xb1,
            (byte) 0xc9, (byte) 0x9f, (byte) 0xb7,
            (byte) 0xdd, (byte) 0x84, (byte) 0x4b,
            (byte) 0xe5, (byte) 0x86, (byte) 0xf0,
            (byte) 0x53, (byte) 0x2a, (byte) 0xd5, (byte) 0xc6,
            (byte) 0x15, (byte) 0x94, (byte) 0xae, (byte) 0x13,
            (byte) 0x7b, (byte) 0x9d, (byte) 0x37, (byte) 0x8b,
            (byte) 0xc6, (byte) 0xc6, (byte) 0x78,
            (byte) 0x9c, (byte) 0x60, (byte) 0x8a, (byte) 0x6f,
            (byte) 0x35, (byte) 0x39, (byte) 0xe0, (byte) 0x78,
            (byte) 0x33, (byte) 0x60, (byte) 0x89, (byte) 0x30,
            (byte) 0x61, (byte) 0x84, (byte) 0x8a,
            (byte) 0xbc, (byte) 0x80, (byte) 0x6c, (byte) 0x1c,
            (byte) 0x55, (byte) 0x96, (byte) 0x50, (byte) 0xb1,
            (byte) 0x96, (byte) 0x5, (byte) 0x21, (byte) 0x65,
            (byte) 0x55, (byte) 0xbb, (byte) 0xa4
        });
    }

    /**
     * Creates a key-pair holder for the given algorithm.
     *
     * @param algorithmName JCA algorithm name; must be one of the pre-generated
     *        encodings ("RSA", "DSA" or "DH").
     * @throws NoSuchAlgorithmException if no pre-generated encoding exists for
     *         the algorithm, or no KeyFactory supports it.
     */
    public TestKeyPair(String algorithmName) throws
            NoSuchAlgorithmException {
        this.algorithmName = algorithmName;
        // Validate both maps (they are maintained in parallel) before touching the factory.
        if (!privateKeyEncoding.containsKey(this.algorithmName)
                || !publicKeyEncoding.containsKey(this.algorithmName)) {
            throw new NoSuchAlgorithmException("Encoded form not available for " +
                this.algorithmName);
        }
        kf = KeyFactory.getInstance(this.algorithmName);
    }

    /**
     * Decodes and returns the stored public key for this instance's algorithm.
     *
     * @throws InvalidKeySpecException if the stored X.509 encoding cannot be decoded.
     */
    public PublicKey getPublic() throws
            InvalidKeySpecException {
        return kf.generatePublic(
            new X509EncodedKeySpec(publicKeyEncoding.get(algorithmName)));
    }

    /**
     * Decodes and returns the stored private key for this instance's algorithm.
     *
     * @throws InvalidKeySpecException if the stored PKCS#8 encoding cannot be decoded.
     */
    public PrivateKey getPrivate() throws
            InvalidKeySpecException {
        return kf.generatePrivate(
            new PKCS8EncodedKeySpec(privateKeyEncoding.get(algorithmName)));
    }
}
|
apache/juneau | 35,181 | juneau-core/juneau-marshall/src/main/java/org/apache/juneau/jsonschema/JsonSchemaGenerator.java | // ***************************************************************************************************************************
// * Licensed to the Apache Software Foundation (ASF) under one or more contributor license agreements. See the NOTICE file *
// * distributed with this work for additional information regarding copyright ownership. The ASF licenses this file *
// * to you under the Apache License, Version 2.0 (the "License"); you may not use this file except in compliance *
// * with the License. You may obtain a copy of the License at *
// * *
// * http://www.apache.org/licenses/LICENSE-2.0 *
// * *
// * Unless required by applicable law or agreed to in writing, software distributed under the License is distributed on an *
// * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the License for the *
// * specific language governing permissions and limitations under the License. *
// ***************************************************************************************************************************
package org.apache.juneau.jsonschema;
import static java.util.Collections.*;
import static org.apache.juneau.collections.JsonMap.*;
import static org.apache.juneau.common.utils.ThrowableUtils.*;
import static org.apache.juneau.common.utils.Utils.*;
import static org.apache.juneau.internal.CollectionUtils.addAll;
import java.lang.annotation.*;
import java.util.*;
import java.util.concurrent.*;
import java.util.regex.*;
import org.apache.juneau.*;
import org.apache.juneau.annotation.*;
import org.apache.juneau.collections.*;
import org.apache.juneau.common.utils.*;
import org.apache.juneau.internal.*;
import org.apache.juneau.json.*;
import org.apache.juneau.utils.*;
/**
* Generates JSON-schema metadata about POJOs.
*
* <h5 class='section'>Notes:</h5><ul>
* <li class='note'>This class is thread safe and reusable.
* </ul>
*
* <p>
* <h5 class='section'>See Also:</h5><ul>
* <li class='link'><a class="doclink" href="https://juneau.apache.org/docs/topics/JsonSchemaDetails">JSON-Schema Support</a>
* </ul>
*/
public class JsonSchemaGenerator extends BeanTraverseContext implements JsonSchemaMetaProvider {
//-------------------------------------------------------------------------------------------------------------------
// Static
//-------------------------------------------------------------------------------------------------------------------
/** Default serializer, all default settings.*/
public static final JsonSchemaGenerator DEFAULT = new JsonSchemaGenerator(create());
/**
 * Creates a new builder for this object.
 *
 * @return A new builder with default settings.
 */
public static Builder create() {
    return new Builder();
}
//-------------------------------------------------------------------------------------------------------------------
// Builder
//-------------------------------------------------------------------------------------------------------------------
/**
* Builder class.
*/
@FluentSetters
public static class Builder extends BeanTraverseContext.Builder {
private static final Cache<HashKey,JsonSchemaGenerator> CACHE = Cache.of(HashKey.class, JsonSchemaGenerator.class).build();
final JsonSerializer.Builder jsonSerializerBuilder;
final JsonParser.Builder jsonParserBuilder;
SortedSet<TypeCategory> addDescriptionsTo, addExamplesTo;
boolean allowNestedDescriptions, allowNestedExamples, useBeanDefs;
Class<? extends BeanDefMapper> beanDefMapper;
SortedSet<String> ignoreTypes;
/**
 * Constructor, default settings.
 *
 * <p>
 * Boolean settings are initialized via {@code env(...)} lookups — presumably
 * system-property/environment overrides (NOTE(review): confirm env() semantics).
 */
protected Builder() {
    BeanContext.Builder bc = beanContext();
    // Internal JSON serializer/parser share this builder's bean context.
    jsonSerializerBuilder = JsonSerializer.create().beanContext(bc);
    jsonParserBuilder = JsonParser.create().beanContext(bc);
    registerBuilders(jsonSerializerBuilder, jsonParserBuilder);
    // null represents "not set"; sets are materialized only when populated.
    addDescriptionsTo = null;
    addExamplesTo = null;
    allowNestedDescriptions = env("JsonSchemaGenerator.allowNestedDescriptions", false);
    allowNestedExamples = env("JsonSchemaGenerator.allowNestedExamples", false);
    useBeanDefs = env("JsonSchemaGenerator.useBeanDefs", false);
    beanDefMapper = BasicBeanDefMapper.class;
    ignoreTypes = null;
}
/**
 * Copy constructor.
 *
 * <p>
 * Seeds this builder from an already-built generator so it can be modified
 * and rebuilt.
 *
 * @param copyFrom The bean to copy from.
 */
protected Builder(JsonSchemaGenerator copyFrom) {
    super(copyFrom);
    BeanContext.Builder bc = beanContext();
    jsonSerializerBuilder = copyFrom.jsonSerializer.copy().beanContext(bc);
    jsonParserBuilder = copyFrom.jsonParser.copy().beanContext(bc);
    registerBuilders(jsonSerializerBuilder, jsonParserBuilder);
    // Built contexts hold empty collections for unset values; map empty back to
    // null (the builder-side representation of "not set"), copying defensively otherwise.
    addDescriptionsTo = copyFrom.addDescriptionsTo.isEmpty() ? null : new TreeSet<>(copyFrom.addDescriptionsTo);
    addExamplesTo = copyFrom.addExamplesTo.isEmpty() ? null : new TreeSet<>(copyFrom.addExamplesTo);
    allowNestedDescriptions = copyFrom.allowNestedDescriptions;
    allowNestedExamples = copyFrom.allowNestedExamples;
    useBeanDefs = copyFrom.useBeanDefs;
    beanDefMapper = copyFrom.beanDefMapper;
    ignoreTypes = copyFrom.ignoreTypes.isEmpty() ? null : new TreeSet<>(copyFrom.ignoreTypes);
}
/**
* Copy constructor.
*
* @param copyFrom The builder to copy from.
*/
protected Builder(Builder copyFrom) {
	super(copyFrom);
	// Share a single BeanContext.Builder between the serializer and parser
	// builders so both use identical bean-introspection settings.
	BeanContext.Builder bc = beanContext();
	jsonSerializerBuilder = copyFrom.jsonSerializerBuilder.copy().beanContext(bc);
	jsonParserBuilder = copyFrom.jsonParserBuilder.copy().beanContext(bc);
	registerBuilders(jsonSerializerBuilder, jsonParserBuilder);
	// Unlike the JsonSchemaGenerator copy constructor, the source builder may
	// hold null sets (null-for-empty convention), so null-check before copying.
	addDescriptionsTo = copyFrom.addDescriptionsTo == null ? null : new TreeSet<>(copyFrom.addDescriptionsTo);
	addExamplesTo = copyFrom.addExamplesTo == null ? null : new TreeSet<>(copyFrom.addExamplesTo);
	allowNestedDescriptions = copyFrom.allowNestedDescriptions;
	allowNestedExamples = copyFrom.allowNestedExamples;
	useBeanDefs = copyFrom.useBeanDefs;
	beanDefMapper = copyFrom.beanDefMapper;
	ignoreTypes = copyFrom.ignoreTypes == null ? null : new TreeSet<>(copyFrom.ignoreTypes);
}
@Override /* Context.Builder */
public Builder copy() {
	// Returns an independent copy of this builder.
	return new Builder(this);
}
@Override /* Context.Builder */
public JsonSchemaGenerator build() {
	// Built instances are cached in CACHE and shared between builders with
	// equal hashKey() values.
	return cache(CACHE).build(JsonSchemaGenerator.class);
}
@Override /* Context.Builder */
public HashKey hashKey() {
	// Cache key used by build()/CACHE.  Every setting that affects generator
	// behavior must be folded in here, otherwise two differently-configured
	// builders could incorrectly share a cached instance.
	return HashKey.of(
		super.hashKey(),
		jsonSerializerBuilder.hashKey(),
		jsonParserBuilder.hashKey(),
		addDescriptionsTo,
		addExamplesTo,
		allowNestedDescriptions,
		allowNestedExamples,
		useBeanDefs,
		beanDefMapper,
		ignoreTypes
	);
}
//-----------------------------------------------------------------------------------------------------------------
// Properties
//-----------------------------------------------------------------------------------------------------------------
/**
* Add descriptions.
*
* <p>
* Identifies which categories of types that descriptions should be automatically added to generated schemas.
* The description is the result of calling {@link ClassMeta#getFullName()}.
* The format is a comma-delimited list of any of the following values:
*
* <ul class='javatree'>
* <li class='jf'>{@link TypeCategory#BEAN BEAN}
* <li class='jf'>{@link TypeCategory#COLLECTION COLLECTION}
* <li class='jf'>{@link TypeCategory#ARRAY ARRAY}
* <li class='jf'>{@link TypeCategory#MAP MAP}
* <li class='jf'>{@link TypeCategory#STRING STRING}
* <li class='jf'>{@link TypeCategory#NUMBER NUMBER}
* <li class='jf'>{@link TypeCategory#BOOLEAN BOOLEAN}
* <li class='jf'>{@link TypeCategory#ANY ANY}
* <li class='jf'>{@link TypeCategory#OTHER OTHER}
* </ul>
*
* @param values
* The values to add to this setting.
* <br>The default is an empty string.
* @return This object.
*/
@FluentSetter
public Builder addDescriptionsTo(TypeCategory...values) {
	// Accumulates into the existing category set (created on first use).
	this.addDescriptionsTo = addAll(this.addDescriptionsTo, values);
	return this;
}
/**
* Add examples.
*
* <p>
* Identifies which categories of types that examples should be automatically added to generated schemas.
* <p>
* The examples come from calling {@link ClassMeta#getExample(BeanSession,JsonParserSession)} which in turn gets examples
* from the following:
* <ul class='javatree'>
* <li class='ja'>{@link Example}
* <li class='ja'>{@link Marshalled#example() Marshalled(example)}
* </ul>
*
* <p>
* The format is a comma-delimited list of any of the following values:
*
* <ul class='javatree'>
* <li class='jf'>{@link TypeCategory#BEAN BEAN}
* <li class='jf'>{@link TypeCategory#COLLECTION COLLECTION}
* <li class='jf'>{@link TypeCategory#ARRAY ARRAY}
* <li class='jf'>{@link TypeCategory#MAP MAP}
* <li class='jf'>{@link TypeCategory#STRING STRING}
* <li class='jf'>{@link TypeCategory#NUMBER NUMBER}
* <li class='jf'>{@link TypeCategory#BOOLEAN BOOLEAN}
* <li class='jf'>{@link TypeCategory#ANY ANY}
* <li class='jf'>{@link TypeCategory#OTHER OTHER}
* </ul>
*
* @param values
* The values to add to this setting.
* <br>The default is an empty string.
* @return This object.
*/
@FluentSetter
public Builder addExamplesTo(TypeCategory...values) {
	// Accumulates into the existing category set (created on first use).
	this.addExamplesTo = addAll(this.addExamplesTo, values);
	return this;
}
/**
* Allow nested descriptions.
*
* <p>
* Identifies whether nested descriptions are allowed in schema definitions.
*
* @return This object.
*/
@FluentSetter
public Builder allowNestedDescriptions() {
	// Shorthand for allowNestedDescriptions(true).
	return allowNestedDescriptions(true);
}
/**
 * Same as {@link #allowNestedDescriptions()} but allows you to explicitly specify the value.
 *
 * @param value The value for this setting.
 * @return This object.
 */
@FluentSetter
public Builder allowNestedDescriptions(boolean value) {
	allowNestedDescriptions = value;
	return this;
}
/**
* Allow nested examples.
*
* <p>
* Identifies whether nested examples are allowed in schema definitions.
*
* @return This object.
*/
@FluentSetter
public Builder allowNestedExamples() {
	// Shorthand for allowNestedExamples(true).
	return allowNestedExamples(true);
}
/**
 * Same as {@link #allowNestedExamples()} but allows you to explicitly specify the value.
 *
 * @param value The value for this setting.
 * @return This object.
 */
@FluentSetter
public Builder allowNestedExamples(boolean value) {
	allowNestedExamples = value;
	return this;
}
/**
* Schema definition mapper.
*
* <p>
* Interface to use for converting Bean classes to definition IDs and URIs.
* <p>
* Used primarily for defining common definition sections for beans in Swagger JSON.
* <p>
* This setting is ignored if {@link JsonSchemaGenerator.Builder#useBeanDefs()} is not enabled.
*
* @param value
* The new value for this setting.
* <br>The default is {@link org.apache.juneau.jsonschema.BasicBeanDefMapper}.
* @return This object.
*/
@FluentSetter
public Builder beanDefMapper(Class<? extends BeanDefMapper> value) {
	// Replaces (does not merge with) any previously-configured mapper class.
	this.beanDefMapper = value;
	return this;
}
/**
* Ignore types from schema definitions.
*
* <h5 class='section'>Description:</h5>
* <p>
* Defines class name patterns that should be ignored when generating schema definitions in the generated
* Swagger documentation.
*
* <h5 class='section'>Example:</h5>
* <p class='bjava'>
* <jc>// Don't generate schema for any prototype packages or the class named 'Swagger'.</jc>
* <ja>@JsonSchemaConfig</ja>(
* ignoreTypes=<js>"Swagger,*.proto.*"</js>
* )
* <jk>public class</jk> MyResource {...}
* </p>
*
* @param values
* The values to add.
* @return This object.
*/
@FluentSetter
public Builder ignoreTypes(String...values) {
	// Accumulates into the existing pattern set (created on first use).
	this.ignoreTypes = addAll(this.ignoreTypes, values);
	return this;
}
/**
* Use bean definitions.
*
* <p>
* When enabled, schemas on beans will be serialized as the following:
* <p class='bjson'>
* {
* type: <js>'object'</js>,
* <js>'$ref'</js>: <js>'#/definitions/TypeId'</js>
* }
* </p>
*
* <p>
* The definitions can then be retrieved from the session using {@link JsonSchemaGeneratorSession#getBeanDefs()}.
* <p>
* Definitions can also be added programmatically using {@link JsonSchemaGeneratorSession#addBeanDef(String, JsonMap)}.
*
* @return This object.
*/
@FluentSetter
public Builder useBeanDefs() {
	// Shorthand for useBeanDefs(true).
	return useBeanDefs(true);
}
/**
 * Same as {@link #useBeanDefs()} but allows you to explicitly specify the value.
 *
 * @param value The value for this setting.
 * @return This object.
 */
@FluentSetter
public Builder useBeanDefs(boolean value) {
	useBeanDefs = value;
	return this;
}
/**
* Gives access to the inner JSON serializer builder if you want to modify the serializer settings.
*
* @return The JSON serializer builder.
*/
public JsonSerializer.Builder getJsonSerializerBuilder() {
return jsonSerializerBuilder;
}
/**
 * Gives access to the inner JSON parser builder if you want to modify the parser settings.
 *
 * @return The JSON parser builder.
 */
public JsonParser.Builder getJsonParserBuilder() {
	return jsonParserBuilder;
}
// <FluentSetters>
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder annotations(Annotation...values) {
super.annotations(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder apply(AnnotationWorkList work) {
super.apply(work);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder applyAnnotations(Object...from) {
super.applyAnnotations(from);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder applyAnnotations(Class<?>...from) {
super.applyAnnotations(from);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder cache(Cache<HashKey,? extends org.apache.juneau.Context> value) {
super.cache(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder debug() {
super.debug();
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder debug(boolean value) {
super.debug(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder impl(Context value) {
super.impl(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.Context.Builder */
public Builder type(Class<? extends org.apache.juneau.Context> value) {
super.type(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanClassVisibility(Visibility value) {
super.beanClassVisibility(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanConstructorVisibility(Visibility value) {
super.beanConstructorVisibility(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanContext(BeanContext value) {
super.beanContext(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanContext(BeanContext.Builder value) {
super.beanContext(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanDictionary(java.lang.Class<?>...values) {
super.beanDictionary(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanFieldVisibility(Visibility value) {
super.beanFieldVisibility(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanInterceptor(Class<?> on, Class<? extends org.apache.juneau.swap.BeanInterceptor<?>> value) {
super.beanInterceptor(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanMapPutReturnsOldValue() {
super.beanMapPutReturnsOldValue();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanMethodVisibility(Visibility value) {
super.beanMethodVisibility(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanProperties(Map<String,Object> values) {
super.beanProperties(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanProperties(Class<?> beanClass, String properties) {
super.beanProperties(beanClass, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanProperties(String beanClassName, String properties) {
super.beanProperties(beanClassName, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesExcludes(Map<String,Object> values) {
super.beanPropertiesExcludes(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesExcludes(Class<?> beanClass, String properties) {
super.beanPropertiesExcludes(beanClass, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesExcludes(String beanClassName, String properties) {
super.beanPropertiesExcludes(beanClassName, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesReadOnly(Map<String,Object> values) {
super.beanPropertiesReadOnly(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesReadOnly(Class<?> beanClass, String properties) {
super.beanPropertiesReadOnly(beanClass, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesReadOnly(String beanClassName, String properties) {
super.beanPropertiesReadOnly(beanClassName, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesWriteOnly(Map<String,Object> values) {
super.beanPropertiesWriteOnly(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesWriteOnly(Class<?> beanClass, String properties) {
super.beanPropertiesWriteOnly(beanClass, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beanPropertiesWriteOnly(String beanClassName, String properties) {
super.beanPropertiesWriteOnly(beanClassName, properties);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beansRequireDefaultConstructor() {
super.beansRequireDefaultConstructor();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beansRequireSerializable() {
super.beansRequireSerializable();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder beansRequireSettersForGetters() {
super.beansRequireSettersForGetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder dictionaryOn(Class<?> on, java.lang.Class<?>...values) {
super.dictionaryOn(on, values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableBeansRequireSomeProperties() {
super.disableBeansRequireSomeProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableIgnoreMissingSetters() {
super.disableIgnoreMissingSetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableIgnoreTransientFields() {
super.disableIgnoreTransientFields();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableIgnoreUnknownNullBeanProperties() {
super.disableIgnoreUnknownNullBeanProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder disableInterfaceProxies() {
super.disableInterfaceProxies();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T> Builder example(Class<T> pojoClass, T o) {
super.example(pojoClass, o);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T> Builder example(Class<T> pojoClass, String json) {
super.example(pojoClass, json);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder findFluentSetters() {
super.findFluentSetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder findFluentSetters(Class<?> on) {
super.findFluentSetters(on);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreInvocationExceptionsOnGetters() {
super.ignoreInvocationExceptionsOnGetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreInvocationExceptionsOnSetters() {
super.ignoreInvocationExceptionsOnSetters();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreUnknownBeanProperties() {
super.ignoreUnknownBeanProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder ignoreUnknownEnumValues() {
super.ignoreUnknownEnumValues();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder implClass(Class<?> interfaceClass, Class<?> implClass) {
super.implClass(interfaceClass, implClass);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder implClasses(Map<Class<?>,Class<?>> values) {
super.implClasses(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder interfaceClass(Class<?> on, Class<?> value) {
super.interfaceClass(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder interfaces(java.lang.Class<?>...value) {
super.interfaces(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder locale(Locale value) {
super.locale(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder mediaType(MediaType value) {
super.mediaType(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder notBeanClasses(java.lang.Class<?>...values) {
super.notBeanClasses(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder notBeanPackages(String...values) {
super.notBeanPackages(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder propertyNamer(Class<? extends org.apache.juneau.PropertyNamer> value) {
super.propertyNamer(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder propertyNamer(Class<?> on, Class<? extends org.apache.juneau.PropertyNamer> value) {
super.propertyNamer(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder sortProperties() {
super.sortProperties();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder sortProperties(java.lang.Class<?>...on) {
super.sortProperties(on);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder stopClass(Class<?> on, Class<?> value) {
super.stopClass(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T, S> Builder swap(Class<T> normalClass, Class<S> swappedClass, ThrowingFunction<T,S> swapFunction) {
super.swap(normalClass, swappedClass, swapFunction);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public <T, S> Builder swap(Class<T> normalClass, Class<S> swappedClass, ThrowingFunction<T,S> swapFunction, ThrowingFunction<S,T> unswapFunction) {
super.swap(normalClass, swappedClass, swapFunction, unswapFunction);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder swaps(Object...values) {
super.swaps(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder swaps(Class<?>...values) {
super.swaps(values);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder timeZone(TimeZone value) {
super.timeZone(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder typeName(Class<?> on, String value) {
super.typeName(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder typePropertyName(String value) {
super.typePropertyName(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder typePropertyName(Class<?> on, String value) {
super.typePropertyName(on, value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder useEnumNames() {
super.useEnumNames();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanContextable.Builder */
public Builder useJavaBeanIntrospector() {
super.useJavaBeanIntrospector();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder detectRecursions() {
super.detectRecursions();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder detectRecursions(boolean value) {
super.detectRecursions(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder ignoreRecursions() {
super.ignoreRecursions();
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder ignoreRecursions(boolean value) {
super.ignoreRecursions(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder initialDepth(int value) {
super.initialDepth(value);
return this;
}
@Override /* GENERATED - org.apache.juneau.BeanTraverseContext.Builder */
public Builder maxDepth(int value) {
super.maxDepth(value);
return this;
}
// </FluentSetters>
}
//-------------------------------------------------------------------------------------------------------------------
// Instance
//-------------------------------------------------------------------------------------------------------------------
final boolean useBeanDefs, allowNestedExamples, allowNestedDescriptions;
final Set<TypeCategory> addExamplesTo, addDescriptionsTo;
final Class<? extends BeanDefMapper> beanDefMapper;
final Set<String> ignoreTypes;
private final BeanDefMapper beanDefMapperBean;
final JsonSerializer jsonSerializer;
final JsonParser jsonParser;
private final Pattern[] ignoreTypePatterns;
private final Map<ClassMeta<?>,JsonSchemaClassMeta> jsonSchemaClassMetas = new ConcurrentHashMap<>();
private final Map<BeanPropertyMeta,JsonSchemaBeanPropertyMeta> jsonSchemaBeanPropertyMetas = new ConcurrentHashMap<>();
/**
 * Constructor.
 *
 * @param builder The builder for this object.
 */
public JsonSchemaGenerator(Builder builder) {
	// NOTE(review): this mutates the incoming builder (forces detectRecursions
	// and ignoreRecursions on) before delegating up — confirm intentional.
	super(builder.detectRecursions().ignoreRecursions());
	useBeanDefs = builder.useBeanDefs;
	allowNestedExamples = builder.allowNestedExamples;
	allowNestedDescriptions = builder.allowNestedDescriptions;
	beanDefMapper = builder.beanDefMapper;
	// The builder uses null-for-empty; normalize to empty sets here so the
	// copy constructor Builder(JsonSchemaGenerator) can assume non-null.
	addExamplesTo = builder.addExamplesTo == null ? emptySet() : new TreeSet<>(builder.addExamplesTo);
	addDescriptionsTo = builder.addDescriptionsTo == null ? emptySet() : new TreeSet<>(builder.addDescriptionsTo);
	ignoreTypes = builder.ignoreTypes == null ? emptySet() : new TreeSet<>(builder.ignoreTypes);
	// Each ignoreTypes entry may itself be a comma-delimited list (see the
	// Builder.ignoreTypes javadoc).  Each element is a glob compiled to a
	// regex: "." is escaped, "*" becomes ".*".
	Set<Pattern> ignoreTypePatterns = Utils.set();
	ignoreTypes.forEach(y -> Utils.split(y, x -> ignoreTypePatterns.add(Pattern.compile(x.replace(".", "\\.").replace("*", ".*")))));
	this.ignoreTypePatterns = ignoreTypePatterns.toArray(new Pattern[ignoreTypePatterns.size()]);
	try {
		// The configured mapper class must expose an accessible no-arg
		// constructor; any reflection failure is rethrown as unchecked.
		beanDefMapperBean = beanDefMapper.getDeclaredConstructor().newInstance();
	} catch (Exception e) {
		throw asRuntimeException(e);
	}
	jsonSerializer = builder.jsonSerializerBuilder.build();
	jsonParser = builder.jsonParserBuilder.beanContext(getBeanContext()).build();
}
@Override /* Context */
public Builder copy() {
	// Returns a new builder initialized from this generator's settings.
	return new Builder(this);
}
@Override /* Context */
public JsonSchemaGeneratorSession.Builder createSession() {
	return JsonSchemaGeneratorSession.create(this);
}
@Override /* Context */
public JsonSchemaGeneratorSession getSession() {
	// Convenience: builds a session with all-default session settings.
	return createSession().build();
}
// Package-private accessor for the configured JSON serializer.
// NOTE(review): callers are not visible in this chunk — presumably the session.
JsonSerializer getJsonSerializer() {
	return jsonSerializer;
}
// Package-private accessor for the configured JSON parser.
JsonParser getJsonParser() {
	return jsonParser;
}
//-----------------------------------------------------------------------------------------------------------------
// Properties
//-----------------------------------------------------------------------------------------------------------------
/**
* Add descriptions to types.
*
* @see Builder#addDescriptionsTo(TypeCategory...)
* @return
* Set of categories of types that descriptions should be automatically added to generated schemas.
*/
protected final Set<TypeCategory> getAddDescriptionsTo() {
return addDescriptionsTo;
}
/**
* Add examples.
*
* @see Builder#addExamplesTo(TypeCategory...)
* @return
* Set of categories of types that examples should be automatically added to generated schemas.
*/
protected final Set<TypeCategory> getAddExamplesTo() {
return addExamplesTo;
}
/**
* Allow nested descriptions.
*
* @see Builder#allowNestedDescriptions()
* @return
* <jk>true</jk> if nested descriptions are allowed in schema definitions.
*/
protected final boolean isAllowNestedDescriptions() {
return allowNestedDescriptions;
}
/**
* Allow nested examples.
*
* @see Builder#allowNestedExamples()
* @return
* <jk>true</jk> if nested examples are allowed in schema definitions.
*/
protected final boolean isAllowNestedExamples() {
return allowNestedExamples;
}
/**
* Bean schema definition mapper.
*
* @see Builder#beanDefMapper(Class)
* @return
* Interface to use for converting Bean classes to definition IDs and URIs.
*/
protected final BeanDefMapper getBeanDefMapper() {
return beanDefMapperBean;
}
/**
 * Ignore types from schema definitions.
 *
 * @see Builder#ignoreTypes(String...)
 * @return
 * 	The compiled name patterns for types excluded from schema generation.
 */
public List<Pattern> getIgnoreTypes() {
	return Utils.alist(ignoreTypePatterns);
}
/**
* Use bean definitions.
*
* @see Builder#useBeanDefs()
* @return
* <jk>true</jk> if schemas on beans will be serialized with <js>'$ref'</js> tags.
*/
protected final boolean isUseBeanDefs() {
return useBeanDefs;
}
//-----------------------------------------------------------------------------------------------------------------
// Extended metadata
//-----------------------------------------------------------------------------------------------------------------
/**
 * Returns the JSON-schema-specific metadata for the specified class, creating and caching it on first request.
 *
 * @param cm The class being looked up.
 * @return The cached metadata for the class.  Never <jk>null</jk>.
 */
@Override
public JsonSchemaClassMeta getJsonSchemaClassMeta(ClassMeta<?> cm) {
	JsonSchemaClassMeta m = jsonSchemaClassMetas.get(cm);
	if (m == null) {
		m = new JsonSchemaClassMeta(cm, this);
		// putIfAbsent (not put):  the old get-then-put sequence let two
		// concurrent callers each construct a meta and receive different
		// instances for the same class.  putIfAbsent guarantees a single
		// canonical instance.  computeIfAbsent is deliberately avoided in
		// case the meta constructor re-enters this method for nested types.
		JsonSchemaClassMeta existing = jsonSchemaClassMetas.putIfAbsent(cm, m);
		if (existing != null)
			m = existing;
	}
	return m;
}
/**
 * Returns the JSON-schema-specific metadata for the specified bean property, creating and caching it on first request.
 *
 * @param bpm The bean property being looked up.
 * @return The cached metadata for the property.  Never <jk>null</jk>.
 */
@Override
public JsonSchemaBeanPropertyMeta getJsonSchemaBeanPropertyMeta(BeanPropertyMeta bpm) {
	JsonSchemaBeanPropertyMeta m = jsonSchemaBeanPropertyMetas.get(bpm);
	if (m == null) {
		m = new JsonSchemaBeanPropertyMeta(bpm, this);
		// putIfAbsent (not put):  the old get-then-put sequence let two
		// concurrent callers each construct a meta and receive different
		// instances for the same property.  putIfAbsent guarantees a single
		// canonical instance.  computeIfAbsent is deliberately avoided in
		// case the meta constructor re-enters this method for nested types.
		JsonSchemaBeanPropertyMeta existing = jsonSchemaBeanPropertyMetas.putIfAbsent(bpm, m);
		if (existing != null)
			m = existing;
	}
	return m;
}
//-----------------------------------------------------------------------------------------------------------------
// Other methods
//-----------------------------------------------------------------------------------------------------------------
/**
 * Returns <jk>true</jk> if the specified type is excluded from schema generation.
 *
 * <p>
 * A type is excluded when either its simple name or fully-qualified name matches any pattern
 * configured via {@link Builder#ignoreTypes(String...)}.
 * <br>Ignored types return <jk>null</jk> on the call to {@link JsonSchemaGeneratorSession#getSchema(ClassMeta)}.
 *
 * @param cm The type to check.
 * @return <jk>true</jk> if the specified type is ignored.
 */
public boolean isIgnoredType(ClassMeta<?> cm) {
	for (Pattern pattern : ignoreTypePatterns) {
		boolean simpleNameMatches = pattern.matcher(cm.getSimpleName()).matches();
		if (simpleNameMatches || pattern.matcher(cm.getName()).matches()) {
			return true;
		}
	}
	return false;
}
//-----------------------------------------------------------------------------------------------------------------
// Other methods
//-----------------------------------------------------------------------------------------------------------------
@Override /* Context */
protected JsonMap properties() {
	// Diagnostic snapshot of this generator's configuration.
	// NOTE(review): consumer of this map is not visible in this chunk —
	// presumably the Context framework's debug/toString output; confirm.
	return filteredMap()
		.append("useBeanDefs", useBeanDefs)
		.append("allowNestedExamples", allowNestedExamples)
		.append("allowNestedDescriptions", allowNestedDescriptions)
		.append("beanDefMapper", beanDefMapper)
		.append("addExamplesTo", addExamplesTo)
		.append("addDescriptionsTo", addDescriptionsTo)
		.append("ignoreTypes", ignoreTypes);
}
} |
apache/flink-kubernetes-operator | 37,338 | flink-kubernetes-operator/src/test/java/org/apache/flink/kubernetes/operator/controller/FlinkStateSnapshotControllerTest.java | /*
* Licensed to the Apache Software Foundation (ASF) under one or more
* contributor license agreements. See the NOTICE file distributed with
* this work for additional information regarding copyright ownership.
* The ASF licenses this file to You under the Apache License, Version 2.0
* (the "License"); you may not use this file except in compliance with
* the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.flink.kubernetes.operator.controller;
import org.apache.flink.configuration.CheckpointingOptions;
import org.apache.flink.configuration.Configuration;
import org.apache.flink.kubernetes.operator.TestUtils;
import org.apache.flink.kubernetes.operator.TestingFlinkResourceContextFactory;
import org.apache.flink.kubernetes.operator.TestingFlinkService;
import org.apache.flink.kubernetes.operator.api.AbstractFlinkResource;
import org.apache.flink.kubernetes.operator.api.CrdConstants;
import org.apache.flink.kubernetes.operator.api.FlinkDeployment;
import org.apache.flink.kubernetes.operator.api.FlinkStateSnapshot;
import org.apache.flink.kubernetes.operator.api.spec.FlinkVersion;
import org.apache.flink.kubernetes.operator.api.spec.JobReference;
import org.apache.flink.kubernetes.operator.api.status.CheckpointType;
import org.apache.flink.kubernetes.operator.api.status.FlinkStateSnapshotStatus;
import org.apache.flink.kubernetes.operator.api.status.JobStatus;
import org.apache.flink.kubernetes.operator.api.status.SavepointFormatType;
import org.apache.flink.kubernetes.operator.api.status.SnapshotTriggerType;
import org.apache.flink.kubernetes.operator.config.FlinkConfigManager;
import org.apache.flink.kubernetes.operator.exception.ReconciliationException;
import org.apache.flink.kubernetes.operator.metrics.MetricManager;
import org.apache.flink.kubernetes.operator.metrics.TestingMetricListener;
import org.apache.flink.kubernetes.operator.observer.snapshot.StateSnapshotObserver;
import org.apache.flink.kubernetes.operator.reconciler.ReconciliationUtils;
import org.apache.flink.kubernetes.operator.reconciler.SnapshotType;
import org.apache.flink.kubernetes.operator.reconciler.snapshot.StateSnapshotReconciler;
import org.apache.flink.kubernetes.operator.utils.EventRecorder;
import org.apache.flink.kubernetes.operator.utils.FlinkResourceEventCollector;
import org.apache.flink.kubernetes.operator.utils.FlinkStateSnapshotEventCollector;
import org.apache.flink.kubernetes.operator.utils.StatusRecorder;
import org.apache.flink.kubernetes.operator.utils.ValidatorUtils;
import io.fabric8.kubernetes.client.KubernetesClient;
import io.fabric8.kubernetes.client.server.mock.EnableKubernetesMockClient;
import io.javaoperatorsdk.operator.api.reconciler.Context;
import io.javaoperatorsdk.operator.api.reconciler.DeleteControl;
import io.javaoperatorsdk.operator.api.reconciler.UpdateControl;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.EnumSource;
import org.junit.jupiter.params.provider.ValueSource;
import javax.annotation.Nullable;
import java.time.Instant;
import java.util.Map;
import java.util.Optional;
import java.util.function.BiConsumer;
import static org.apache.flink.api.common.JobStatus.CANCELED;
import static org.apache.flink.api.common.JobStatus.RUNNING;
import static org.apache.flink.kubernetes.operator.api.CrdConstants.LABEL_SNAPSHOT_JOB_REFERENCE_KIND;
import static org.apache.flink.kubernetes.operator.api.CrdConstants.LABEL_SNAPSHOT_JOB_REFERENCE_NAME;
import static org.apache.flink.kubernetes.operator.api.CrdConstants.LABEL_SNAPSHOT_STATE;
import static org.apache.flink.kubernetes.operator.api.CrdConstants.LABEL_SNAPSHOT_TRIGGER_TYPE;
import static org.apache.flink.kubernetes.operator.api.CrdConstants.LABEL_SNAPSHOT_TYPE;
import static org.apache.flink.kubernetes.operator.api.status.FlinkStateSnapshotStatus.State.ABANDONED;
import static org.apache.flink.kubernetes.operator.api.status.FlinkStateSnapshotStatus.State.COMPLETED;
import static org.apache.flink.kubernetes.operator.api.status.FlinkStateSnapshotStatus.State.FAILED;
import static org.apache.flink.kubernetes.operator.api.status.FlinkStateSnapshotStatus.State.IN_PROGRESS;
import static org.apache.flink.kubernetes.operator.api.status.FlinkStateSnapshotStatus.State.TRIGGER_PENDING;
import static org.apache.flink.kubernetes.operator.metrics.FlinkStateSnapshotMetricsUtils.assertSnapshotMetrics;
import static org.assertj.core.api.Assertions.assertThat;
import static org.junit.jupiter.api.Assertions.assertThrows;
/**
 * Test class for {@link FlinkStateSnapshotController}.
 *
 * <p>Exercises the reconcile/cleanup lifecycle of {@code FlinkStateSnapshot} resources
 * (savepoints and checkpoints) against a {@link TestingFlinkService}: status transitions
 * (TRIGGER_PENDING / IN_PROGRESS / COMPLETED / FAILED / ABANDONED), managed labels, emitted
 * Kubernetes events, snapshot metrics, and dispose-on-delete handling.
 */
@EnableKubernetesMockClient(crud = true)
public class FlinkStateSnapshotControllerTest {
    private static final String SAVEPOINT_NAME = "savepoint-test";
    private static final String CHECKPOINT_NAME = "checkpoint-test";
    private static final String SAVEPOINT_PATH = "/tmp/asd";
    private static final String JOB_ID = "fd72014d4c864993a2e5a9287b4a9c5d";
    private final FlinkConfigManager configManager = new FlinkConfigManager(new Configuration());
    // Counts every status update pushed through the StatusRecorder; many tests assert on it.
    private final StatusUpdateCounter statusUpdateCounter = new StatusUpdateCounter();
    private final TestingFlinkService flinkService = new TestingFlinkService();
    // Injected by the @EnableKubernetesMockClient extension.
    private KubernetesClient client;
    private FlinkStateSnapshotEventCollector flinkStateSnapshotEventCollector;
    private EventRecorder eventRecorder;
    private TestingFlinkResourceContextFactory ctxFactory;
    private TestingMetricListener listener;
    private MetricManager<FlinkStateSnapshot> metricManager;
    private StatusRecorder<FlinkStateSnapshot, FlinkStateSnapshotStatus> statusRecorder;
    private FlinkStateSnapshotController controller;
    private Context<FlinkStateSnapshot> context;
    /** Builds a fresh controller with collecting event/metric/status recorders for each test. */
    @BeforeEach
    public void beforeEach() {
        flinkStateSnapshotEventCollector = new FlinkStateSnapshotEventCollector();
        eventRecorder =
                new EventRecorder(
                        new FlinkResourceEventCollector(), flinkStateSnapshotEventCollector);
        ctxFactory =
                new TestingFlinkResourceContextFactory(
                        configManager,
                        TestUtils.createTestMetricGroup(new Configuration()),
                        flinkService,
                        eventRecorder);
        listener = new TestingMetricListener(new Configuration());
        metricManager =
                MetricManager.createFlinkStateSnapshotMetricManager(
                        new Configuration(), listener.getMetricGroup());
        statusRecorder = new StatusRecorder<>(metricManager, statusUpdateCounter);
        controller =
                new FlinkStateSnapshotController(
                        ValidatorUtils.discoverValidators(configManager),
                        ctxFactory,
                        new StateSnapshotReconciler(ctxFactory, eventRecorder),
                        new StateSnapshotObserver(ctxFactory, eventRecorder),
                        eventRecorder,
                        metricManager,
                        statusRecorder);
    }
    /**
     * Errors keep the snapshot in TRIGGER_PENDING until the backoff limit is exhausted,
     * after which it transitions to FAILED.
     */
    @ParameterizedTest
    @ValueSource(ints = {0, 3, 7})
    public void testReconcileBackoff(int backoffLimit) {
        var deployment = createDeployment();
        context = TestUtils.createSnapshotContext(client, deployment);
        var snapshot = createSavepoint(deployment, false, backoffLimit);
        snapshot.setStatus(new FlinkStateSnapshotStatus());
        flinkService.setTriggerSavepointFailure(true);
        for (int i = 0; i < backoffLimit; i++) {
            controller.updateErrorStatus(snapshot, context, new Exception());
            assertThat(snapshot.getStatus().getState()).isEqualTo(TRIGGER_PENDING);
        }
        controller.updateErrorStatus(snapshot, context, new Exception());
        assertThat(snapshot.getStatus().getState()).isEqualTo(FAILED);
    }
    /**
     * A snapshot marked alreadyExists completes immediately with the spec path and no trigger,
     * regardless of whether a job reference is present.
     */
    @ParameterizedTest
    @ValueSource(booleans = {true, false})
    public void testReconcileSavepointAlreadyExists(boolean jobReferenced) {
        var deployment = createDeployment();
        context = TestUtils.createSnapshotContext(client, jobReferenced ? deployment : null);
        var snapshot = createSavepoint(jobReferenced ? deployment : null, true);
        controller.reconcile(snapshot, context);
        var status = snapshot.getStatus();
        var createdAt = Instant.parse(snapshot.getMetadata().getCreationTimestamp());
        var resultAt = Instant.parse(status.getResultTimestamp());
        assertThat(resultAt).isAfter(createdAt);
        assertThat(status.getPath()).isEqualTo(SAVEPOINT_PATH);
        assertThat(status.getTriggerId()).isNull();
        assertThat(status.getError()).isNull();
        assertThat(statusUpdateCounter.getCount()).isEqualTo(1);
    }
    /**
     * Verifies that the controller keeps the managed snapshot labels in sync with the status,
     * and restores them if they are modified out-of-band.
     */
    @ParameterizedTest
    @EnumSource(SnapshotType.class)
    public void testReconcileLabels(SnapshotType snapshotType) {
        var deployment = createDeployment();
        context = TestUtils.createSnapshotContext(client, null);
        FlinkStateSnapshot snapshot;
        if (snapshotType == SnapshotType.SAVEPOINT) {
            snapshot = createSavepoint(deployment);
        } else {
            snapshot = createCheckpoint(deployment, CheckpointType.FULL, 0);
        }
        // First we have empty secondary resource, update labels but not status
        assertThat(snapshot.getMetadata().getLabels()).isEmpty();
        assertUpdateControl(controller.reconcile(snapshot, context), true, false);
        assertLabels(snapshot, null, snapshotType, SnapshotTriggerType.MANUAL, TRIGGER_PENDING);
        // Correct secondary resource, update status to IN_PROGRESS, update labels too
        context = TestUtils.createSnapshotContext(client, deployment);
        assertUpdateControl(controller.reconcile(snapshot, context), true, true);
        assertLabels(snapshot, deployment, snapshotType, SnapshotTriggerType.MANUAL, IN_PROGRESS);
        // No update to status or labels
        assertUpdateControl(controller.reconcile(snapshot, context), false, false);
        assertLabels(snapshot, deployment, snapshotType, SnapshotTriggerType.MANUAL, IN_PROGRESS);
        // Update to both status and labels
        assertUpdateControl(controller.reconcile(snapshot, context), true, true);
        assertLabels(snapshot, deployment, snapshotType, SnapshotTriggerType.MANUAL, COMPLETED);
        // Try to manually modify label
        snapshot.getMetadata().getLabels().put(LABEL_SNAPSHOT_TYPE, "custom-value");
        assertUpdateControl(controller.reconcile(snapshot, context), true, false);
        assertLabels(snapshot, deployment, snapshotType, SnapshotTriggerType.MANUAL, COMPLETED);
    }
    /**
     * Asserts the snapshot's managed labels: type, trigger type, state, and — when a secondary
     * resource is given — the job-reference kind/name labels (absent otherwise).
     */
    private void assertLabels(
            FlinkStateSnapshot snapshot,
            @Nullable AbstractFlinkResource<?, ?> secondaryResource,
            SnapshotType snapshotType,
            SnapshotTriggerType snapshotTriggerType,
            FlinkStateSnapshotStatus.State state) {
        assertThat(snapshot.getMetadata().getLabels().get(LABEL_SNAPSHOT_TYPE))
                .isEqualTo(snapshotType.name());
        assertThat(snapshot.getMetadata().getLabels().get(LABEL_SNAPSHOT_TRIGGER_TYPE))
                .isEqualTo(snapshotTriggerType.name());
        assertThat(snapshot.getMetadata().getLabels().get(LABEL_SNAPSHOT_STATE))
                .isEqualTo(state.name());
        if (secondaryResource == null) {
            assertThat(snapshot.getMetadata().getLabels())
                    .doesNotContainKey(LABEL_SNAPSHOT_JOB_REFERENCE_KIND);
            assertThat(snapshot.getMetadata().getLabels())
                    .doesNotContainKey(LABEL_SNAPSHOT_JOB_REFERENCE_NAME);
        } else {
            assertThat(snapshot.getMetadata().getLabels().get(LABEL_SNAPSHOT_JOB_REFERENCE_KIND))
                    .isEqualTo(secondaryResource.getKind());
            assertThat(snapshot.getMetadata().getLabels().get(LABEL_SNAPSHOT_JOB_REFERENCE_NAME))
                    .isEqualTo(secondaryResource.getMetadata().getName());
        }
    }
    /** Asserts the patch-resource / patch-status flags of an {@link UpdateControl}. */
    private void assertUpdateControl(
            UpdateControl<FlinkStateSnapshot> actual, boolean updateResource, boolean patchStatus) {
        assertThat(actual.isPatchResource()).isEqualTo(updateResource);
        assertThat(actual.isPatchStatus()).isEqualTo(patchStatus);
    }
    /** Missing secondary resource keeps the snapshot pending and emits a ValidationError event. */
    @Test
    public void testReconcileSnapshotDeploymentDoesNotExist() {
        var deployment = createDeployment();
        context = TestUtils.createSnapshotContext(client, null);
        var snapshot = createSavepoint(deployment);
        controller.reconcile(snapshot, context);
        assertThat(snapshot.getStatus().getState()).isEqualTo(TRIGGER_PENDING);
        assertThat(flinkStateSnapshotEventCollector.events)
                .hasSize(1)
                .allSatisfy(
                        e -> {
                            assertThat(e.getReason())
                                    .isEqualTo(EventRecorder.Reason.ValidationError.name());
                            assertThat(e.getType()).isEqualTo(EventRecorder.Type.Warning.name());
                            assertThat(e.getMessage()).contains("was not found");
                        });
    }
    /** An in-progress snapshot whose job gets cancelled transitions to ABANDONED. */
    @Test
    public void testReconcileSnapshotAbandoned() {
        var deployment = createDeployment();
        context = TestUtils.createSnapshotContext(client, deployment);
        var snapshot = createSavepoint(deployment);
        controller.reconcile(snapshot, context);
        assertThat(snapshot.getStatus().getState()).isEqualTo(IN_PROGRESS);
        deployment.getStatus().getJobStatus().setState(CANCELED);
        controller.reconcile(snapshot, context);
        var status = snapshot.getStatus();
        var createdAt = Instant.parse(snapshot.getMetadata().getCreationTimestamp());
        var triggerAt = Instant.parse(status.getTriggerTimestamp());
        assertThat(triggerAt).isAfter(createdAt);
        assertThat(status.getPath()).isNull();
        assertThat(status.getTriggerId()).isEqualTo("savepoint_trigger_0");
        assertThat(status.getState()).isEqualTo(ABANDONED);
        assertThat(statusUpdateCounter.getCount()).isEqualTo(2);
    }
    /** Happy path: trigger a savepoint, observe IN_PROGRESS, then COMPLETED with its path. */
    @Test
    public void testReconcileNewSavepoint() {
        var deployment = createDeployment();
        context = TestUtils.createSnapshotContext(client, deployment);
        var snapshot = createSavepoint(deployment);
        controller.reconcile(snapshot, context);
        var status = snapshot.getStatus();
        var createdAt = Instant.parse(snapshot.getMetadata().getCreationTimestamp());
        var triggerAt = Instant.parse(status.getTriggerTimestamp());
        assertThat(triggerAt).isAfter(createdAt);
        assertThat(status.getPath()).isNull();
        assertThat(status.getError()).isNull();
        assertThat(status.getTriggerId()).isEqualTo("savepoint_trigger_0");
        assertThat(status.getState()).isEqualTo(IN_PROGRESS);
        assertThat(snapshot.getMetadata().getLabels().get(LABEL_SNAPSHOT_TRIGGER_TYPE))
                .isEqualTo(SnapshotTriggerType.MANUAL.name());
        assertThat(statusUpdateCounter.getCount()).isEqualTo(1);
        // First time check will still result in pending due to TestingFlinkService impl
        controller.reconcile(snapshot, context);
        assertThat(status.getState()).isEqualTo(IN_PROGRESS);
        // Second time check complete
        controller.reconcile(snapshot, context);
        status = snapshot.getStatus();
        assertThat(status.getState()).isEqualTo(COMPLETED);
        assertThat(status.getPath()).isEqualTo("savepoint_0");
        assertThat(status.getError()).isNull();
        assertThat(statusUpdateCounter.getCount()).isEqualTo(2);
    }
    /**
     * Cleanup behavior matrix: non-terminal or dispose-disabled snapshots are deleted without
     * disposal; failed disposals (missing/stopped job, REST error) keep the finalizer and
     * reschedule; a successful disposal actually disposes the savepoint path.
     */
    @Test
    public void testReconcileSavepointCleanup() {
        var deployment = createDeployment();
        context = TestUtils.createSnapshotContext(client, deployment);
        var snapshot = createSavepoint(deployment);
        snapshot.setStatus(new FlinkStateSnapshotStatus());
        snapshot.getSpec().getSavepoint().setDisposeOnDelete(true);
        snapshot.getStatus().setState(TRIGGER_PENDING);
        assertDeleteControl(controller.cleanup(snapshot, context), true, null);
        assertThat(flinkService.getDisposedSavepoints()).isEmpty();
        snapshot.getSpec().getSavepoint().setDisposeOnDelete(true);
        snapshot.getStatus().setState(FAILED);
        assertDeleteControl(controller.cleanup(snapshot, context), true, null);
        assertThat(flinkService.getDisposedSavepoints()).isEmpty();
        snapshot.getSpec().getSavepoint().setDisposeOnDelete(true);
        snapshot.getStatus().setState(ABANDONED);
        assertDeleteControl(controller.cleanup(snapshot, context), true, null);
        assertThat(flinkService.getDisposedSavepoints()).isEmpty();
        // In-progress snapshots must not be deleted yet; cleanup is rescheduled.
        snapshot.getSpec().getSavepoint().setDisposeOnDelete(true);
        snapshot.getStatus().setState(IN_PROGRESS);
        assertDeleteControl(
                controller.cleanup(snapshot, context),
                false,
                configManager.getOperatorConfiguration().getReconcileInterval().toMillis());
        assertThat(flinkService.getDisposedSavepoints()).isEmpty();
        // No disposal requested
        snapshot.getSpec().getSavepoint().setDisposeOnDelete(false);
        snapshot.getStatus().setState(COMPLETED);
        assertDeleteControl(controller.cleanup(snapshot, context), true, null);
        assertThat(flinkService.getDisposedSavepoints()).isEmpty();
        snapshot.getStatus().setPath(SAVEPOINT_PATH);
        snapshot.getStatus().setState(COMPLETED);
        // Failed dispose, job not found
        snapshot.getSpec().getSavepoint().setDisposeOnDelete(true);
        assertDeleteControl(
                controller.cleanup(snapshot, TestUtils.createSnapshotContext(client, null)),
                false,
                configManager.getOperatorConfiguration().getReconcileInterval().toMillis());
        assertThat(flinkService.getDisposedSavepoints()).isEmpty();
        // Failed dispose, job not running
        deployment.getStatus().getJobStatus().setState(CANCELED);
        snapshot.getSpec().getSavepoint().setDisposeOnDelete(true);
        assertDeleteControl(
                controller.cleanup(snapshot, context),
                false,
                configManager.getOperatorConfiguration().getReconcileInterval().toMillis());
        assertThat(flinkService.getDisposedSavepoints()).isEmpty();
        deployment.getStatus().getJobStatus().setState(RUNNING);
        // Failed dispose, REST error
        snapshot.getSpec().getSavepoint().setDisposeOnDelete(true);
        flinkService.setDisposeSavepointFailure(true);
        assertDeleteControl(
                controller.cleanup(snapshot, TestUtils.createSnapshotContext(client, null)),
                false,
                configManager.getOperatorConfiguration().getReconcileInterval().toMillis());
        assertThat(flinkService.getDisposedSavepoints()).isEmpty();
        flinkService.setDisposeSavepointFailure(false);
        // Successful disposal
        snapshot.getSpec().getSavepoint().setDisposeOnDelete(true);
        assertDeleteControl(controller.cleanup(snapshot, context), true, null);
        assertThat(flinkService.getDisposedSavepoints())
                .hasSize(1)
                .allSatisfy(s -> assertThat(s).isEqualTo(SAVEPOINT_PATH));
    }
    /** Asserts a {@link DeleteControl}'s finalizer-removal flag and optional reschedule delay. */
    private void assertDeleteControl(
            DeleteControl deleteControl, boolean removeFinalizer, @Nullable Long scheduleDelay) {
        assertThat(deleteControl)
                .satisfies(
                        c -> {
                            assertThat(c.isRemoveFinalizer()).isEqualTo(removeFinalizer);
                            assertThat(c.getScheduleDelay())
                                    .isEqualTo(Optional.ofNullable(scheduleDelay));
                        });
    }
    /**
     * A savepoint spec without a path fails validation with a SavepointError event; adding the
     * path makes the next reconcile succeed.
     */
    @Test
    public void testReconcileNewSavepointNoPath() {
        var deployment = createDeployment();
        context = TestUtils.createSnapshotContext(client, deployment);
        var snapshot = createSavepoint(deployment);
        snapshot.getSpec().getSavepoint().setPath(null);
        var ex =
                assertThrows(
                        ReconciliationException.class,
                        () -> controller.reconcile(snapshot, context));
        controller.updateErrorStatus(snapshot, context, ex);
        assertThat(snapshot.getStatus().getState()).isEqualTo(TRIGGER_PENDING);
        assertThat(snapshot.getStatus().getPath()).isNull();
        assertThat(snapshot.getStatus().getError()).contains("savepoint path");
        // Add path to spec, it should work then
        snapshot.getSpec().getSavepoint().setPath(SAVEPOINT_PATH);
        controller.reconcile(snapshot, context);
        assertThat(snapshot.getStatus().getState()).isEqualTo(IN_PROGRESS);
        assertThat(flinkStateSnapshotEventCollector.events)
                .hasSize(1)
                .allSatisfy(
                        event -> {
                            assertThat(event.getReason())
                                    .isEqualTo(EventRecorder.Reason.SavepointError.name());
                            assertThat(event.getType())
                                    .isEqualTo(EventRecorder.Type.Warning.name());
                            assertThat(event.getMessage())
                                    .isEqualTo(snapshot.getStatus().getError());
                        });
    }
    /** Happy path for checkpoints: trigger, IN_PROGRESS, then COMPLETED with the checkpoint path. */
    @Test
    public void testReconcileNewCheckpoint() {
        var deployment = createDeployment();
        context = TestUtils.createSnapshotContext(client, deployment);
        var checkpointType = CheckpointType.FULL;
        var snapshot = createCheckpoint(deployment, checkpointType, 1);
        controller.reconcile(snapshot, context);
        var status = snapshot.getStatus();
        var createdAt = Instant.parse(snapshot.getMetadata().getCreationTimestamp());
        var triggerAt = Instant.parse(status.getTriggerTimestamp());
        assertThat(triggerAt).isAfter(createdAt);
        assertThat(status.getPath()).isNull();
        assertThat(status.getError()).isNull();
        assertThat(status.getTriggerId()).isEqualTo("checkpoint_trigger_0");
        assertThat(status.getState()).isEqualTo(IN_PROGRESS);
        assertThat(statusUpdateCounter.getCount()).isEqualTo(1);
        // First time check will still result in pending due to TestingFlinkService impl
        controller.reconcile(snapshot, context);
        assertThat(status.getState()).isEqualTo(IN_PROGRESS);
        // Second time check complete
        controller.reconcile(snapshot, context);
        status = snapshot.getStatus();
        assertThat(status.getState()).isEqualTo(COMPLETED);
        assertThat(status.getPath()).isEqualTo("checkpoint_1");
        assertThat(statusUpdateCounter.getCount()).isEqualTo(2);
    }
    /** Checkpoint CRs on a pre-1.17 Flink version fail immediately with a version error. */
    @Test
    public void testReconcileNewCheckpointUnsupportedFlinkVersion() {
        var deployment = createDeployment(FlinkVersion.v1_16);
        context = TestUtils.createSnapshotContext(client, deployment);
        var checkpointType = CheckpointType.FULL;
        var snapshot = createCheckpoint(deployment, checkpointType, 0);
        var ex =
                assertThrows(
                        ReconciliationException.class,
                        () -> controller.reconcile(snapshot, context));
        controller.updateErrorStatus(snapshot, context, ex);
        var status = snapshot.getStatus();
        assertThat(status.getState()).isEqualTo(FAILED);
        assertThat(status.getPath()).isNull();
        assertThat(status.getFailures()).isEqualTo(1);
        assertThat(status.getError()).contains("requires Flink 1.17+");
    }
    /**
     * Savepoint fetch errors: first failure retries (TRIGGER_PENDING, backoffLimit=1), the
     * second exhausts the backoff and fails the snapshot, with a SavepointError event each time.
     */
    @Test
    public void testReconcileSavepointError() {
        var deployment = createDeployment();
        context = TestUtils.createSnapshotContext(client, deployment);
        var snapshot = createSavepoint(deployment, false, 1);
        controller.reconcile(snapshot, context);
        // Remove savepoint triggers so that fetching the savepoint will result in an error
        flinkService.getSavepointTriggers().clear();
        var ex =
                assertThrows(
                        ReconciliationException.class,
                        () -> controller.reconcile(snapshot, context));
        controller.updateErrorStatus(snapshot, context, ex);
        // Backoff limit not reached, we retry in the next reconcile loop
        var status = snapshot.getStatus();
        var createdAt = Instant.parse(snapshot.getMetadata().getCreationTimestamp());
        var triggerAt = Instant.parse(status.getTriggerTimestamp());
        assertThat(triggerAt).isAfter(createdAt);
        assertThat(status.getState()).isEqualTo(TRIGGER_PENDING);
        assertThat(status.getError()).contains(TestingFlinkService.SNAPSHOT_ERROR_MESSAGE);
        controller.reconcile(snapshot, context);
        assertThat(snapshot.getStatus().getState()).isEqualTo(IN_PROGRESS);
        flinkService.getSavepointTriggers().clear();
        // Backoff limit reached, we have FAILED state
        ex =
                assertThrows(
                        ReconciliationException.class,
                        () -> controller.reconcile(snapshot, context));
        controller.updateErrorStatus(snapshot, context, ex);
        status = snapshot.getStatus();
        createdAt = Instant.parse(snapshot.getMetadata().getCreationTimestamp());
        triggerAt = Instant.parse(status.getTriggerTimestamp());
        assertThat(triggerAt).isAfter(createdAt);
        assertThat(status.getState()).isEqualTo(FAILED);
        assertThat(status.getPath()).isNull();
        assertThat(status.getFailures()).isEqualTo(2);
        assertThat(status.getError()).contains(TestingFlinkService.SNAPSHOT_ERROR_MESSAGE);
        assertThat(statusUpdateCounter.getCount()).isEqualTo(4);
        assertThat(flinkStateSnapshotEventCollector.events)
                .hasSize(2)
                .allSatisfy(
                        event -> {
                            assertThat(event.getReason())
                                    .isEqualTo(EventRecorder.Reason.SavepointError.name());
                            assertThat(event.getType())
                                    .isEqualTo(EventRecorder.Type.Warning.name());
                            assertThat(event.getMessage())
                                    .contains(TestingFlinkService.SNAPSHOT_ERROR_MESSAGE);
                        });
    }
    /** Checkpoint counterpart of {@code testReconcileSavepointError}: CheckpointError events. */
    @Test
    public void testReconcileCheckpointError() {
        var deployment = createDeployment();
        context = TestUtils.createSnapshotContext(client, deployment);
        var snapshot = createCheckpoint(deployment, CheckpointType.FULL, 1);
        controller.reconcile(snapshot, context);
        // Remove savepoint triggers so that fetching the savepoint will result in an error
        flinkService.getCheckpointTriggers().clear();
        var ex =
                assertThrows(
                        ReconciliationException.class,
                        () -> controller.reconcile(snapshot, context));
        controller.updateErrorStatus(snapshot, context, ex);
        // Backoff limit not reached, we retry in the next reconcile loop
        var status = snapshot.getStatus();
        var createdAt = Instant.parse(snapshot.getMetadata().getCreationTimestamp());
        var triggerAt = Instant.parse(status.getTriggerTimestamp());
        assertThat(triggerAt).isAfter(createdAt);
        assertThat(status.getState()).isEqualTo(TRIGGER_PENDING);
        assertThat(status.getError()).contains(TestingFlinkService.SNAPSHOT_ERROR_MESSAGE);
        controller.reconcile(snapshot, context);
        assertThat(snapshot.getStatus().getState()).isEqualTo(IN_PROGRESS);
        flinkService.getCheckpointTriggers().clear();
        // Backoff limit reached, we have FAILED state
        ex =
                assertThrows(
                        ReconciliationException.class,
                        () -> controller.reconcile(snapshot, context));
        controller.updateErrorStatus(snapshot, context, ex);
        status = snapshot.getStatus();
        createdAt = Instant.parse(snapshot.getMetadata().getCreationTimestamp());
        triggerAt = Instant.parse(status.getTriggerTimestamp());
        assertThat(triggerAt).isAfter(createdAt);
        assertThat(status.getState()).isEqualTo(FAILED);
        assertThat(status.getPath()).isNull();
        assertThat(status.getFailures()).isEqualTo(2);
        assertThat(status.getError()).contains(TestingFlinkService.SNAPSHOT_ERROR_MESSAGE);
        assertThat(statusUpdateCounter.getCount()).isEqualTo(4);
        assertThat(flinkStateSnapshotEventCollector.events)
                .hasSize(2)
                .allSatisfy(
                        event -> {
                            assertThat(event.getReason())
                                    .isEqualTo(EventRecorder.Reason.CheckpointError.name());
                            assertThat(event.getType())
                                    .isEqualTo(EventRecorder.Type.Warning.name());
                            assertThat(event.getMessage())
                                    .contains(TestingFlinkService.SNAPSHOT_ERROR_MESSAGE);
                        });
    }
    /** Losing the referenced job after triggering abandons the snapshot with an event. */
    @Test
    public void testReconcileJobNotFound() {
        var deployment = createDeployment();
        var snapshot = createSavepoint(deployment);
        var errorMessage =
                String.format(
                        "Secondary resource %s (%s) for savepoint %s was not found",
                        deployment.getMetadata().getName(),
                        CrdConstants.KIND_FLINK_DEPLOYMENT,
                        SAVEPOINT_NAME);
        // First reconcile will trigger the snapshot.
        controller.reconcile(snapshot, TestUtils.createSnapshotContext(client, deployment));
        var status = snapshot.getStatus();
        assertThat(status.getState()).isEqualTo(IN_PROGRESS);
        assertThat(status.getPath()).isNull();
        assertThat(status.getError()).isNull();
        // Second reconcile will abandon the snapshot, as secondary resource won't be found in
        // observe phase.
        controller.reconcile(snapshot, TestUtils.createSnapshotContext(client, null));
        status = snapshot.getStatus();
        assertThat(status.getState()).isEqualTo(ABANDONED);
        assertThat(status.getPath()).isNull();
        assertThat(status.getError()).isEqualTo(errorMessage);
        // observe phase triggers event for snapshot abandoned, then validation will also trigger an
        // event.
        assertThat(flinkStateSnapshotEventCollector.events).hasSize(1);
        assertThat(flinkStateSnapshotEventCollector.events.get(0))
                .satisfies(
                        event -> {
                            assertThat(event.getReason())
                                    .isEqualTo(EventRecorder.Reason.SnapshotAbandoned.name());
                            assertThat(event.getType())
                                    .isEqualTo(EventRecorder.Type.Warning.name());
                            assertThat(event.getMessage()).isEqualTo(errorMessage);
                        });
    }
    /** A non-running referenced job abandons the snapshot before it is ever triggered. */
    @Test
    public void testReconcileJobNotRunning() {
        var deployment = createDeployment();
        deployment.getStatus().getJobStatus().setState(CANCELED);
        context = TestUtils.createSnapshotContext(client, deployment);
        var snapshot = createSavepoint(deployment);
        var errorMessage =
                String.format(
                        "Secondary resource %s (%s) for savepoint %s is not running",
                        deployment.getMetadata().getName(),
                        CrdConstants.KIND_FLINK_DEPLOYMENT,
                        SAVEPOINT_NAME);
        controller.reconcile(snapshot, context);
        var status = snapshot.getStatus();
        assertThat(status.getState()).isEqualTo(ABANDONED);
        assertThat(status.getPath()).isNull();
        assertThat(status.getError()).isEqualTo(errorMessage);
        assertThat(status.getTriggerId()).isNull();
        assertThat(flinkStateSnapshotEventCollector.events)
                .hasSize(1)
                .allSatisfy(
                        event -> {
                            assertThat(event.getReason())
                                    .isEqualTo(EventRecorder.Reason.SnapshotAbandoned.name());
                            assertThat(event.getType())
                                    .isEqualTo(EventRecorder.Type.Warning.name());
                            assertThat(event.getMessage()).isEqualTo(errorMessage);
                        });
    }
    /** Snapshot state metrics are tracked per namespace and dropped on resource cleanup. */
    @Test
    public void testMetrics() {
        var deployment = createDeployment();
        var savepoint = createSavepoint(deployment);
        savepoint.getSpec().getSavepoint().setDisposeOnDelete(false);
        var checkpoint = createCheckpoint(deployment, CheckpointType.FULL, 1);
        context = TestUtils.createSnapshotContext(client, deployment);
        controller.reconcile(savepoint, context);
        controller.reconcile(savepoint, context);
        controller.reconcile(savepoint, context);
        assertThat(savepoint.getStatus().getState()).isEqualTo(COMPLETED);
        controller.reconcile(checkpoint, context);
        controller.reconcile(checkpoint, context);
        controller.reconcile(checkpoint, context);
        assertThat(checkpoint.getStatus().getState()).isEqualTo(COMPLETED);
        assertSnapshotMetrics(
                listener, TestUtils.TEST_NAMESPACE, Map.of(COMPLETED, 1), Map.of(COMPLETED, 1));
        // Remove savepoint
        assertDeleteControl(controller.cleanup(savepoint, context), true, null);
        assertSnapshotMetrics(listener, TestUtils.TEST_NAMESPACE, Map.of(), Map.of(COMPLETED, 1));
        // Remove checkpoint
        assertDeleteControl(controller.cleanup(checkpoint, context), true, null);
        assertSnapshotMetrics(listener, TestUtils.TEST_NAMESPACE, Map.of(), Map.of());
    }
    /** Creates a savepoint CR that does not exist yet, with the default backoff limit of 7. */
    private FlinkStateSnapshot createSavepoint(FlinkDeployment deployment) {
        return createSavepoint(deployment, false, 7);
    }
    /** Creates a savepoint CR with the default backoff limit of 7. */
    private FlinkStateSnapshot createSavepoint(FlinkDeployment deployment, boolean alreadyExists) {
        return createSavepoint(deployment, alreadyExists, 7);
    }
    /**
     * Builds and persists a savepoint CR in the mock cluster. A null deployment produces a CR
     * without a job reference.
     */
    private FlinkStateSnapshot createSavepoint(
            FlinkDeployment deployment, boolean alreadyExists, int backoffLimit) {
        var snapshot =
                TestUtils.buildFlinkStateSnapshotSavepoint(
                        SAVEPOINT_NAME,
                        TestUtils.TEST_NAMESPACE,
                        SAVEPOINT_PATH,
                        alreadyExists,
                        deployment == null ? null : JobReference.fromFlinkResource(deployment));
        snapshot.getSpec().setBackoffLimit(backoffLimit);
        snapshot.getSpec().getSavepoint().setFormatType(SavepointFormatType.CANONICAL);
        client.resource(snapshot).create();
        return snapshot;
    }
    /** Builds and persists a checkpoint CR referencing the given deployment. */
    private FlinkStateSnapshot createCheckpoint(
            FlinkDeployment deployment, CheckpointType checkpointType, int backoffLimit) {
        var snapshot =
                TestUtils.buildFlinkStateSnapshotCheckpoint(
                        CHECKPOINT_NAME,
                        TestUtils.TEST_NAMESPACE,
                        checkpointType,
                        JobReference.fromFlinkResource(deployment));
        snapshot.getSpec().setBackoffLimit(backoffLimit);
        client.resource(snapshot).create();
        return snapshot;
    }
    /** Creates a running application-cluster deployment on the default Flink version (1.20). */
    private FlinkDeployment createDeployment() {
        return createDeployment(FlinkVersion.v1_20);
    }
    /**
     * Creates and persists a running application-cluster deployment on the given Flink version.
     * The savepoint directory config is removed so tests control the savepoint path explicitly.
     */
    private FlinkDeployment createDeployment(FlinkVersion flinkVersion) {
        var deployment = TestUtils.buildApplicationCluster();
        deployment
                .getStatus()
                .setJobStatus(JobStatus.builder().state(RUNNING).jobId(JOB_ID).build());
        deployment.getSpec().setFlinkVersion(flinkVersion);
        deployment
                .getSpec()
                .getFlinkConfiguration()
                .remove(CheckpointingOptions.SAVEPOINT_DIRECTORY.key());
        ReconciliationUtils.updateStatusForDeployedSpec(deployment, new Configuration());
        client.resource(deployment).create();
        return deployment;
    }
    /** Status-update listener that simply counts invocations; plugged into the StatusRecorder. */
    private static class StatusUpdateCounter
            implements BiConsumer<FlinkStateSnapshot, FlinkStateSnapshotStatus> {
        private int counter;
        @Override
        public void accept(FlinkStateSnapshot resource, FlinkStateSnapshotStatus prevStatus) {
            counter++;
        }
        public int getCount() {
            return counter;
        }
    }
}
|
googleapis/google-cloud-java | 36,885 | java-dialogflow-cx/google-cloud-dialogflow-cx/src/test/java/com/google/cloud/dialogflow/cx/v3beta1/FlowsClientTest.java | /*
* Copyright 2025 Google LLC
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* https://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package com.google.cloud.dialogflow.cx.v3beta1;
import static com.google.cloud.dialogflow.cx.v3beta1.FlowsClient.ListFlowsPagedResponse;
import static com.google.cloud.dialogflow.cx.v3beta1.FlowsClient.ListLocationsPagedResponse;
import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.api.gax.rpc.StatusCode;
import com.google.cloud.location.GetLocationRequest;
import com.google.cloud.location.ListLocationsRequest;
import com.google.cloud.location.ListLocationsResponse;
import com.google.cloud.location.Location;
import com.google.common.collect.Lists;
import com.google.longrunning.Operation;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Any;
import com.google.protobuf.Empty;
import com.google.protobuf.FieldMask;
import com.google.protobuf.Timestamp;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.HashMap;
import java.util.List;
import java.util.UUID;
import java.util.concurrent.ExecutionException;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;
@Generated("by gapic-generator-java")
public class FlowsClientTest {
  // Shared gRPC mock services, started once for the whole test class.
  private static MockFlows mockFlows;
  private static MockLocations mockLocations;
  private static MockServiceHelper mockServiceHelper;
  // Per-test state: a channel provider bound to the mock server and the client under test.
  private LocalChannelProvider channelProvider;
  private FlowsClient client;
  /** Starts the in-process mock gRPC server hosting the Flows and Locations services. */
  @BeforeClass
  public static void startStaticServer() {
    mockFlows = new MockFlows();
    mockLocations = new MockLocations();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(), Arrays.<MockGrpcService>asList(mockFlows, mockLocations));
    mockServiceHelper.start();
  }
  /** Stops the shared mock server after all tests have run. */
  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }
@Before
public void setUp() throws IOException {
mockServiceHelper.reset();
channelProvider = mockServiceHelper.createChannelProvider();
FlowsSettings settings =
FlowsSettings.newBuilder()
.setTransportChannelProvider(channelProvider)
.setCredentialsProvider(NoCredentialsProvider.create())
.build();
client = FlowsClient.create(settings);
}
  /** Closes the client created in {@code setUp} to release its channel. */
  @After
  public void tearDown() throws Exception {
    client.close();
  }
@Test
public void createFlowTest() throws Exception {
Flow expectedResponse =
Flow.newBuilder()
.setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
.setDisplayName("displayName1714148973")
.setDescription("description-1724546052")
.addAllTransitionRoutes(new ArrayList<TransitionRoute>())
.addAllEventHandlers(new ArrayList<EventHandler>())
.addAllTransitionRouteGroups(new ArrayList<String>())
.setNluSettings(NluSettings.newBuilder().build())
.setAdvancedSettings(AdvancedSettings.newBuilder().build())
.setKnowledgeConnectorSettings(KnowledgeConnectorSettings.newBuilder().build())
.setMultiLanguageSettings(Flow.MultiLanguageSettings.newBuilder().build())
.setLocked(true)
.build();
mockFlows.addResponse(expectedResponse);
AgentName parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]");
Flow flow = Flow.newBuilder().build();
Flow actualResponse = client.createFlow(parent, flow);
Assert.assertEquals(expectedResponse, actualResponse);
List<AbstractMessage> actualRequests = mockFlows.getRequests();
Assert.assertEquals(1, actualRequests.size());
CreateFlowRequest actualRequest = ((CreateFlowRequest) actualRequests.get(0));
Assert.assertEquals(parent.toString(), actualRequest.getParent());
Assert.assertEquals(flow, actualRequest.getFlow());
Assert.assertTrue(
channelProvider.isHeaderSent(
ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
}
@Test
public void createFlowExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
AgentName parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]");
Flow flow = Flow.newBuilder().build();
client.createFlow(parent, flow);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /**
   * createFlow(String, Flow): same contract as the typed-name overload, but the raw parent
   * string must be forwarded verbatim in the CreateFlowRequest.
   */
  @Test
  public void createFlowTest2() throws Exception {
    Flow expectedResponse =
        Flow.newBuilder()
            .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
            .setDisplayName("displayName1714148973")
            .setDescription("description-1724546052")
            .addAllTransitionRoutes(new ArrayList<TransitionRoute>())
            .addAllEventHandlers(new ArrayList<EventHandler>())
            .addAllTransitionRouteGroups(new ArrayList<String>())
            .setNluSettings(NluSettings.newBuilder().build())
            .setAdvancedSettings(AdvancedSettings.newBuilder().build())
            .setKnowledgeConnectorSettings(KnowledgeConnectorSettings.newBuilder().build())
            .setMultiLanguageSettings(Flow.MultiLanguageSettings.newBuilder().build())
            .setLocked(true)
            .build();
    mockFlows.addResponse(expectedResponse);
    String parent = "parent-995424086";
    Flow flow = Flow.newBuilder().build();
    Flow actualResponse = client.createFlow(parent, flow);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    CreateFlowRequest actualRequest = ((CreateFlowRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(flow, actualRequest.getFlow());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void createFlowExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
String parent = "parent-995424086";
Flow flow = Flow.newBuilder().build();
client.createFlow(parent, flow);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /**
   * deleteFlow(FlowName): the DeleteFlowRequest must carry the formatted resource name, with
   * exactly one RPC issued and the standard client headers attached.
   */
  @Test
  public void deleteFlowTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockFlows.addResponse(expectedResponse);
    FlowName name = FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
    client.deleteFlow(name);
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteFlowRequest actualRequest = ((DeleteFlowRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void deleteFlowExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
FlowName name = FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
client.deleteFlow(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /** deleteFlow(String): the raw name string must be forwarded verbatim in the request. */
  @Test
  public void deleteFlowTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockFlows.addResponse(expectedResponse);
    String name = "name3373707";
    client.deleteFlow(name);
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    DeleteFlowRequest actualRequest = ((DeleteFlowRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void deleteFlowExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
String name = "name3373707";
client.deleteFlow(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /**
   * listFlows(AgentName): a single-page response (empty next-page token) must be fully
   * iterable through the paged wrapper, and the request must carry the formatted parent.
   */
  @Test
  public void listFlowsTest() throws Exception {
    Flow responsesElement = Flow.newBuilder().build();
    ListFlowsResponse expectedResponse =
        ListFlowsResponse.newBuilder()
            .setNextPageToken("")
            .addAllFlows(Arrays.asList(responsesElement))
            .build();
    mockFlows.addResponse(expectedResponse);
    AgentName parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]");
    ListFlowsPagedResponse pagedListResponse = client.listFlows(parent);
    List<Flow> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getFlowsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListFlowsRequest actualRequest = ((ListFlowsRequest) actualRequests.get(0));
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void listFlowsExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
AgentName parent = AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]");
client.listFlows(parent);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /** listFlows(String): same paging contract, with the raw parent string forwarded verbatim. */
  @Test
  public void listFlowsTest2() throws Exception {
    Flow responsesElement = Flow.newBuilder().build();
    ListFlowsResponse expectedResponse =
        ListFlowsResponse.newBuilder()
            .setNextPageToken("")
            .addAllFlows(Arrays.asList(responsesElement))
            .build();
    mockFlows.addResponse(expectedResponse);
    String parent = "parent-995424086";
    ListFlowsPagedResponse pagedListResponse = client.listFlows(parent);
    List<Flow> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getFlowsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListFlowsRequest actualRequest = ((ListFlowsRequest) actualRequests.get(0));
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void listFlowsExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
String parent = "parent-995424086";
client.listFlows(parent);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /**
   * getFlow(FlowName): the stubbed Flow is returned unchanged and the GetFlowRequest carries
   * the formatted resource name.
   */
  @Test
  public void getFlowTest() throws Exception {
    Flow expectedResponse =
        Flow.newBuilder()
            .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
            .setDisplayName("displayName1714148973")
            .setDescription("description-1724546052")
            .addAllTransitionRoutes(new ArrayList<TransitionRoute>())
            .addAllEventHandlers(new ArrayList<EventHandler>())
            .addAllTransitionRouteGroups(new ArrayList<String>())
            .setNluSettings(NluSettings.newBuilder().build())
            .setAdvancedSettings(AdvancedSettings.newBuilder().build())
            .setKnowledgeConnectorSettings(KnowledgeConnectorSettings.newBuilder().build())
            .setMultiLanguageSettings(Flow.MultiLanguageSettings.newBuilder().build())
            .setLocked(true)
            .build();
    mockFlows.addResponse(expectedResponse);
    FlowName name = FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
    Flow actualResponse = client.getFlow(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetFlowRequest actualRequest = ((GetFlowRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void getFlowExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
FlowName name = FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
client.getFlow(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /** getFlow(String): same contract, with the raw name string forwarded verbatim. */
  @Test
  public void getFlowTest2() throws Exception {
    Flow expectedResponse =
        Flow.newBuilder()
            .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
            .setDisplayName("displayName1714148973")
            .setDescription("description-1724546052")
            .addAllTransitionRoutes(new ArrayList<TransitionRoute>())
            .addAllEventHandlers(new ArrayList<EventHandler>())
            .addAllTransitionRouteGroups(new ArrayList<String>())
            .setNluSettings(NluSettings.newBuilder().build())
            .setAdvancedSettings(AdvancedSettings.newBuilder().build())
            .setKnowledgeConnectorSettings(KnowledgeConnectorSettings.newBuilder().build())
            .setMultiLanguageSettings(Flow.MultiLanguageSettings.newBuilder().build())
            .setLocked(true)
            .build();
    mockFlows.addResponse(expectedResponse);
    String name = "name3373707";
    Flow actualResponse = client.getFlow(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetFlowRequest actualRequest = ((GetFlowRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void getFlowExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
String name = "name3373707";
client.getFlow(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /**
   * updateFlow(Flow, FieldMask): both the flow payload and the update mask must be forwarded
   * in the UpdateFlowRequest, and the stubbed response returned unchanged.
   */
  @Test
  public void updateFlowTest() throws Exception {
    Flow expectedResponse =
        Flow.newBuilder()
            .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
            .setDisplayName("displayName1714148973")
            .setDescription("description-1724546052")
            .addAllTransitionRoutes(new ArrayList<TransitionRoute>())
            .addAllEventHandlers(new ArrayList<EventHandler>())
            .addAllTransitionRouteGroups(new ArrayList<String>())
            .setNluSettings(NluSettings.newBuilder().build())
            .setAdvancedSettings(AdvancedSettings.newBuilder().build())
            .setKnowledgeConnectorSettings(KnowledgeConnectorSettings.newBuilder().build())
            .setMultiLanguageSettings(Flow.MultiLanguageSettings.newBuilder().build())
            .setLocked(true)
            .build();
    mockFlows.addResponse(expectedResponse);
    Flow flow = Flow.newBuilder().build();
    FieldMask updateMask = FieldMask.newBuilder().build();
    Flow actualResponse = client.updateFlow(flow, updateMask);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    UpdateFlowRequest actualRequest = ((UpdateFlowRequest) actualRequests.get(0));
    Assert.assertEquals(flow, actualRequest.getFlow());
    Assert.assertEquals(updateMask, actualRequest.getUpdateMask());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void updateFlowExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
Flow flow = Flow.newBuilder().build();
FieldMask updateMask = FieldMask.newBuilder().build();
client.updateFlow(flow, updateMask);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /**
   * trainFlowAsync(FlowName): a completed LRO wrapping Empty must resolve the future, and the
   * TrainFlowRequest must carry the formatted resource name.
   */
  @Test
  public void trainFlowTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    // Pre-completed operation so the client future resolves immediately.
    Operation resultOperation =
        Operation.newBuilder()
            .setName("trainFlowTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockFlows.addResponse(resultOperation);
    FlowName name = FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
    client.trainFlowAsync(name).get();
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    TrainFlowRequest actualRequest = ((TrainFlowRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void trainFlowExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
FlowName name = FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
client.trainFlowAsync(name).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
}
}
  /** trainFlowAsync(String): same LRO contract with the raw name string forwarded verbatim. */
  @Test
  public void trainFlowTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            // NOTE(review): reuses the operation id "trainFlowTest" rather than "trainFlowTest2";
            // appears to be generator output and the id is only an opaque LRO handle — confirm.
            .setName("trainFlowTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockFlows.addResponse(resultOperation);
    String name = "name3373707";
    client.trainFlowAsync(name).get();
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    TrainFlowRequest actualRequest = ((TrainFlowRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void trainFlowExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
String name = "name3373707";
client.trainFlowAsync(name).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
}
}
  /**
   * validateFlow(ValidateFlowRequest): the request's name and language code must be forwarded
   * unchanged and the stubbed FlowValidationResult returned as-is.
   */
  @Test
  public void validateFlowTest() throws Exception {
    FlowValidationResult expectedResponse =
        FlowValidationResult.newBuilder()
            .setName(
                FlowValidationResultName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]")
                    .toString())
            .addAllValidationMessages(new ArrayList<ValidationMessage>())
            .setUpdateTime(Timestamp.newBuilder().build())
            .build();
    mockFlows.addResponse(expectedResponse);
    ValidateFlowRequest request =
        ValidateFlowRequest.newBuilder()
            .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
            .setLanguageCode("languageCode-2092349083")
            .build();
    FlowValidationResult actualResponse = client.validateFlow(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ValidateFlowRequest actualRequest = ((ValidateFlowRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getLanguageCode(), actualRequest.getLanguageCode());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void validateFlowExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
ValidateFlowRequest request =
ValidateFlowRequest.newBuilder()
.setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
.setLanguageCode("languageCode-2092349083")
.build();
client.validateFlow(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /**
   * getFlowValidationResult(FlowValidationResultName): the formatted resource name must be
   * forwarded and the stubbed result returned unchanged.
   */
  @Test
  public void getFlowValidationResultTest() throws Exception {
    FlowValidationResult expectedResponse =
        FlowValidationResult.newBuilder()
            .setName(
                FlowValidationResultName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]")
                    .toString())
            .addAllValidationMessages(new ArrayList<ValidationMessage>())
            .setUpdateTime(Timestamp.newBuilder().build())
            .build();
    mockFlows.addResponse(expectedResponse);
    FlowValidationResultName name =
        FlowValidationResultName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
    FlowValidationResult actualResponse = client.getFlowValidationResult(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetFlowValidationResultRequest actualRequest =
        ((GetFlowValidationResultRequest) actualRequests.get(0));
    Assert.assertEquals(name.toString(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void getFlowValidationResultExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
FlowValidationResultName name =
FlowValidationResultName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]");
client.getFlowValidationResult(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /** getFlowValidationResult(String): the raw name string is forwarded verbatim. */
  @Test
  public void getFlowValidationResultTest2() throws Exception {
    FlowValidationResult expectedResponse =
        FlowValidationResult.newBuilder()
            .setName(
                FlowValidationResultName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]")
                    .toString())
            .addAllValidationMessages(new ArrayList<ValidationMessage>())
            .setUpdateTime(Timestamp.newBuilder().build())
            .build();
    mockFlows.addResponse(expectedResponse);
    String name = "name3373707";
    FlowValidationResult actualResponse = client.getFlowValidationResult(name);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetFlowValidationResultRequest actualRequest =
        ((GetFlowValidationResultRequest) actualRequests.get(0));
    Assert.assertEquals(name, actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void getFlowValidationResultExceptionTest2() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
String name = "name3373707";
client.getFlowValidationResult(name);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /**
   * importFlowAsync(ImportFlowRequest): the LRO must resolve to the stubbed ImportFlowResponse
   * and every request field (parent, uri, content, import option, strategy) must be forwarded.
   */
  @Test
  public void importFlowTest() throws Exception {
    ImportFlowResponse expectedResponse =
        ImportFlowResponse.newBuilder()
            .setFlow(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
            .build();
    // Pre-completed operation so the client future resolves immediately.
    Operation resultOperation =
        Operation.newBuilder()
            .setName("importFlowTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockFlows.addResponse(resultOperation);
    ImportFlowRequest request =
        ImportFlowRequest.newBuilder()
            .setParent(AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]").toString())
            .setFlowImportStrategy(FlowImportStrategy.newBuilder().build())
            .build();
    ImportFlowResponse actualResponse = client.importFlowAsync(request).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ImportFlowRequest actualRequest = ((ImportFlowRequest) actualRequests.get(0));
    Assert.assertEquals(request.getParent(), actualRequest.getParent());
    Assert.assertEquals(request.getFlowUri(), actualRequest.getFlowUri());
    Assert.assertEquals(request.getFlowContent(), actualRequest.getFlowContent());
    Assert.assertEquals(request.getImportOption(), actualRequest.getImportOption());
    Assert.assertEquals(request.getFlowImportStrategy(), actualRequest.getFlowImportStrategy());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void importFlowExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
ImportFlowRequest request =
ImportFlowRequest.newBuilder()
.setParent(AgentName.of("[PROJECT]", "[LOCATION]", "[AGENT]").toString())
.setFlowImportStrategy(FlowImportStrategy.newBuilder().build())
.build();
client.importFlowAsync(request).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
}
}
  /**
   * exportFlowAsync(ExportFlowRequest): the LRO must resolve to the stubbed ExportFlowResponse
   * and the name, flow URI, and include-referenced-flows flag must be forwarded.
   */
  @Test
  public void exportFlowTest() throws Exception {
    ExportFlowResponse expectedResponse = ExportFlowResponse.newBuilder().build();
    Operation resultOperation =
        Operation.newBuilder()
            .setName("exportFlowTest")
            .setDone(true)
            .setResponse(Any.pack(expectedResponse))
            .build();
    mockFlows.addResponse(resultOperation);
    ExportFlowRequest request =
        ExportFlowRequest.newBuilder()
            .setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
            .setFlowUri("flowUri-765815458")
            .setIncludeReferencedFlows(true)
            .build();
    ExportFlowResponse actualResponse = client.exportFlowAsync(request).get();
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockFlows.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ExportFlowRequest actualRequest = ((ExportFlowRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getFlowUri(), actualRequest.getFlowUri());
    Assert.assertEquals(
        request.getIncludeReferencedFlows(), actualRequest.getIncludeReferencedFlows());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void exportFlowExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockFlows.addException(exception);
try {
ExportFlowRequest request =
ExportFlowRequest.newBuilder()
.setName(FlowName.of("[PROJECT]", "[LOCATION]", "[AGENT]", "[FLOW]").toString())
.setFlowUri("flowUri-765815458")
.setIncludeReferencedFlows(true)
.build();
client.exportFlowAsync(request).get();
Assert.fail("No exception raised");
} catch (ExecutionException e) {
Assert.assertEquals(InvalidArgumentException.class, e.getCause().getClass());
InvalidArgumentException apiException = ((InvalidArgumentException) e.getCause());
Assert.assertEquals(StatusCode.Code.INVALID_ARGUMENT, apiException.getStatusCode().getCode());
}
}
  /**
   * listLocations (mixin service): a single-page response must be fully iterable and all
   * request fields (name, filter, page size, page token) must be forwarded to mockLocations.
   */
  @Test
  public void listLocationsTest() throws Exception {
    Location responsesElement = Location.newBuilder().build();
    ListLocationsResponse expectedResponse =
        ListLocationsResponse.newBuilder()
            .setNextPageToken("")
            .addAllLocations(Arrays.asList(responsesElement))
            .build();
    mockLocations.addResponse(expectedResponse);
    ListLocationsRequest request =
        ListLocationsRequest.newBuilder()
            .setName("name3373707")
            .setFilter("filter-1274492040")
            .setPageSize(883849137)
            .setPageToken("pageToken873572522")
            .build();
    ListLocationsPagedResponse pagedListResponse = client.listLocations(request);
    List<Location> resources = Lists.newArrayList(pagedListResponse.iterateAll());
    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(expectedResponse.getLocationsList().get(0), resources.get(0));
    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    ListLocationsRequest actualRequest = ((ListLocationsRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertEquals(request.getFilter(), actualRequest.getFilter());
    Assert.assertEquals(request.getPageSize(), actualRequest.getPageSize());
    Assert.assertEquals(request.getPageToken(), actualRequest.getPageToken());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void listLocationsExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockLocations.addException(exception);
try {
ListLocationsRequest request =
ListLocationsRequest.newBuilder()
.setName("name3373707")
.setFilter("filter-1274492040")
.setPageSize(883849137)
.setPageToken("pageToken873572522")
.build();
client.listLocations(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
  /**
   * getLocation (mixin service): the stubbed Location is returned unchanged and the request
   * name is forwarded to mockLocations.
   */
  @Test
  public void getLocationTest() throws Exception {
    Location expectedResponse =
        Location.newBuilder()
            .setName("name3373707")
            .setLocationId("locationId1541836720")
            .setDisplayName("displayName1714148973")
            .putAllLabels(new HashMap<String, String>())
            .setMetadata(Any.newBuilder().build())
            .build();
    mockLocations.addResponse(expectedResponse);
    GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
    Location actualResponse = client.getLocation(request);
    Assert.assertEquals(expectedResponse, actualResponse);
    List<AbstractMessage> actualRequests = mockLocations.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    GetLocationRequest actualRequest = ((GetLocationRequest) actualRequests.get(0));
    Assert.assertEquals(request.getName(), actualRequest.getName());
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }
@Test
public void getLocationExceptionTest() throws Exception {
StatusRuntimeException exception = new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT);
mockLocations.addException(exception);
try {
GetLocationRequest request = GetLocationRequest.newBuilder().setName("name3373707").build();
client.getLocation(request);
Assert.fail("No exception raised");
} catch (InvalidArgumentException e) {
// Expected exception.
}
}
}
|
apache/hudi | 37,452 | hudi-spark-datasource/hudi-spark/src/test/java/org/apache/hudi/io/TestMergeHandle.java | /*
* Licensed to the Apache Software Foundation (ASF) under one
* or more contributor license agreements. See the NOTICE file
* distributed with this work for additional information
* regarding copyright ownership. The ASF licenses this file
* to you under the Apache License, Version 2.0 (the
* "License"); you may not use this file except in compliance
* with the License. You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
package org.apache.hudi.io;
import org.apache.hudi.avro.AvroSchemaUtils;
import org.apache.hudi.avro.JoinedGenericRecord;
import org.apache.hudi.client.SecondaryIndexStats;
import org.apache.hudi.client.SparkRDDWriteClient;
import org.apache.hudi.client.WriteStatus;
import org.apache.hudi.common.config.HoodieMetadataConfig;
import org.apache.hudi.common.config.RecordMergeMode;
import org.apache.hudi.common.config.TypedProperties;
import org.apache.hudi.common.engine.HoodieLocalEngineContext;
import org.apache.hudi.common.engine.LocalTaskContextSupplier;
import org.apache.hudi.common.model.DefaultHoodieRecordPayload;
import org.apache.hudi.common.model.HoodieAvroRecord;
import org.apache.hudi.common.model.HoodieBaseFile;
import org.apache.hudi.common.model.HoodieFileGroup;
import org.apache.hudi.common.model.HoodieKey;
import org.apache.hudi.common.model.HoodieLogFile;
import org.apache.hudi.common.model.HoodieRecord;
import org.apache.hudi.common.model.HoodieRecordDelegate;
import org.apache.hudi.common.model.HoodieRecordLocation;
import org.apache.hudi.common.model.HoodieRecordPayload;
import org.apache.hudi.common.model.HoodieWriteStat;
import org.apache.hudi.common.model.OverwriteWithLatestAvroPayload;
import org.apache.hudi.common.model.SerializableIndexedRecord;
import org.apache.hudi.common.table.HoodieTableConfig;
import org.apache.hudi.common.table.HoodieTableMetaClient;
import org.apache.hudi.common.table.cdc.HoodieCDCSupplementalLoggingMode;
import org.apache.hudi.common.table.log.HoodieLogFormat;
import org.apache.hudi.common.table.log.block.HoodieDataBlock;
import org.apache.hudi.common.table.log.block.HoodieLogBlock;
import org.apache.hudi.common.table.read.DeleteContext;
import org.apache.hudi.common.table.view.FileSystemViewStorageConfig;
import org.apache.hudi.common.testutils.HoodieTestDataGenerator;
import org.apache.hudi.common.util.CollectionUtils;
import org.apache.hudi.common.util.ConfigUtils;
import org.apache.hudi.common.util.DateTimeUtils;
import org.apache.hudi.common.util.HoodieRecordUtils;
import org.apache.hudi.common.util.Option;
import org.apache.hudi.common.util.ParquetUtils;
import org.apache.hudi.common.util.collection.ClosableIterator;
import org.apache.hudi.common.util.collection.Pair;
import org.apache.hudi.config.HoodieWriteConfig;
import org.apache.hudi.exception.HoodieIOException;
import org.apache.hudi.keygen.constant.KeyGeneratorOptions;
import org.apache.hudi.storage.StoragePath;
import org.apache.hudi.table.HoodieSparkCopyOnWriteTable;
import org.apache.hudi.table.HoodieSparkTable;
import org.apache.avro.Schema;
import org.apache.avro.generic.GenericRecord;
import org.apache.avro.generic.IndexedRecord;
import org.apache.spark.api.java.JavaRDD;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.params.ParameterizedTest;
import org.junit.jupiter.params.provider.ValueSource;
import org.mockito.MockedConstruction;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Collection;
import java.util.Collections;
import java.util.HashMap;
import java.util.List;
import java.util.Map;
import java.util.Properties;
import java.util.concurrent.atomic.AtomicBoolean;
import java.util.concurrent.atomic.AtomicLong;
import java.util.stream.Collectors;
import static org.apache.hudi.common.table.cdc.HoodieCDCUtils.schemaBySupplementalLoggingMode;
import static org.apache.hudi.common.table.timeline.HoodieTimeline.COMMIT_ACTION;
import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.AVRO_SCHEMA;
import static org.apache.hudi.common.testutils.HoodieTestDataGenerator.TRIP_EXAMPLE_SCHEMA;
import static org.junit.jupiter.api.Assertions.assertEquals;
import static org.junit.jupiter.api.Assertions.assertNotEquals;
import static org.junit.jupiter.api.Assertions.assertNotNull;
import static org.junit.jupiter.api.Assertions.assertTrue;
import static org.junit.jupiter.api.AssertionsKt.assertNull;
import static org.mockito.ArgumentMatchers.any;
import static org.mockito.Mockito.doThrow;
import static org.mockito.Mockito.mockConstruction;
/**
 * Unit tests {@link HoodieMergeHandle}.
 *
 * <p>Exercises both the legacy {@code HoodieWriteMergeHandle} and the newer
 * {@code FileGroupReaderBasedMergeHandle}, validating write stats, record-level
 * index (RLI) delegates, secondary index (SI) stats, CDC output on simulated
 * write failures, and event-time watermark metadata.
 */
public class TestMergeHandle extends BaseTestHandle {

  // Field used as the ordering/precombine field in all generated test records.
  private static final String ORDERING_FIELD = "timestamp";

  /**
   * Verifies RLI and SI stats produced by a merge applying both updates and deletes.
   *
   * <p>Runs once with the file-group-reader based handle and once with the legacy
   * handle; the two differ in how deletes flagged with {@code ignoreIndexUpdate}
   * are reflected in the written record delegates.
   *
   * @param useFileGroupReader true to exercise {@code FileGroupReaderBasedMergeHandle},
   *                           false for the legacy {@code HoodieWriteMergeHandle}
   */
  @ParameterizedTest
  @ValueSource(booleans = {true, false})
  public void testMergeHandleRLIAndSIStatsWithUpdatesAndDeletes(boolean useFileGroupReader) throws Exception {
    // delete and recreate the table dir so each parameterization starts from a clean state
    metaClient.getStorage().deleteDirectory(metaClient.getBasePath());
    Properties properties = new Properties();
    properties.put(KeyGeneratorOptions.RECORDKEY_FIELD_NAME.key(), "_row_key");
    properties.put(KeyGeneratorOptions.PARTITIONPATH_FIELD_NAME.key(), "partition_path");
    properties.put(HoodieWriteConfig.PRECOMBINE_FIELD_NAME.key(), ORDERING_FIELD);
    initMetaClient(getTableType(), properties);
    // init config and table
    HoodieWriteConfig config = getHoodieWriteConfigBuilder().build();
    HoodieSparkTable.create(config, new HoodieLocalEngineContext(storageConf), metaClient);
    // one round per partition
    String partitionPath = HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[0];
    // init some args
    HoodieTestDataGenerator dataGenerator = new HoodieTestDataGenerator(new String[] {partitionPath});
    SparkRDDWriteClient client = getHoodieWriteClient(config);
    String instantTime = client.startCommit();
    // seed the table with 100 inserts in a single file group
    List<HoodieRecord> records1 = dataGenerator.generateInserts(instantTime, 100);
    JavaRDD<HoodieRecord> writeRecords = jsc.parallelize(records1, 1);
    JavaRDD<WriteStatus> statuses = client.upsert(writeRecords, instantTime);
    client.commit(instantTime, statuses, Option.empty(), COMMIT_ACTION, Collections.emptyMap(), Option.empty());
    metaClient = HoodieTableMetaClient.reload(metaClient);
    HoodieSparkCopyOnWriteTable table = (HoodieSparkCopyOnWriteTable) HoodieSparkCopyOnWriteTable.create(config, context, metaClient);
    HoodieFileGroup fileGroup = table.getFileSystemView().getAllFileGroups(partitionPath).collect(Collectors.toList()).get(0);
    String fileId = fileGroup.getFileGroupId().getFileId();
    instantTime = "001";
    int numUpdates = 10;
    List<HoodieRecord> newRecords = dataGenerator.generateUniqueUpdates(instantTime, numUpdates);
    // generateDeleteRecords (base class helper) appends deletes to newRecords and returns their count
    int numDeletes = generateDeleteRecords(newRecords, dataGenerator, instantTime);
    if (!useFileGroupReader) {
      // legacy merge handle expects HoodieAvroPayload
      DeleteContext deleteContext = new DeleteContext(CollectionUtils.emptyProps(), AVRO_SCHEMA).withReaderSchema(AVRO_SCHEMA);
      newRecords = newRecords.stream()
          .map(avroIndexedRecord -> {
            HoodieRecord hoodieRecord = new HoodieAvroRecord<>(avroIndexedRecord.getKey(), new DefaultHoodieRecordPayload(Option.of((GenericRecord) avroIndexedRecord.getData())),
                avroIndexedRecord.getOperation(), null, avroIndexedRecord.isDelete(deleteContext, CollectionUtils.emptyProps()));
            // carry the ignoreIndexUpdate flag through the payload conversion
            hoodieRecord.setIgnoreIndexUpdate(avroIndexedRecord.getIgnoreIndexUpdate());
            return hoodieRecord;
          })
          .collect(Collectors.toList());
    }
    assertTrue(numDeletes > 0);
    HoodieWriteMergeHandle mergeHandle;
    if (useFileGroupReader) {
      mergeHandle = new FileGroupReaderBasedMergeHandle(config, instantTime, table, newRecords.iterator(), partitionPath, fileId,
          new LocalTaskContextSupplier(), Option.empty());
    } else {
      mergeHandle = new HoodieWriteMergeHandle(config, instantTime, table, newRecords.iterator(), partitionPath, fileId, new LocalTaskContextSupplier(),
          new HoodieBaseFile(fileGroup.getAllBaseFiles().findFirst().get()), Option.empty());
    }
    mergeHandle.doMerge();
    WriteStatus writeStatus = (WriteStatus) mergeHandle.close().get(0);
    // verify stats after merge
    // the FG-reader handle honors ignoreIndexUpdate (emits 0 such delegates); the legacy handle emits all 5
    int deletesWithIgnoreIndexUpdate = 5;
    int expectedNumDelegatesWithIgnoreIndexUpdate = useFileGroupReader ? 0 : deletesWithIgnoreIndexUpdate;
    assertEquals(100 - numDeletes, writeStatus.getStat().getNumWrites());
    assertEquals(numUpdates, writeStatus.getStat().getNumUpdateWrites());
    assertEquals(numDeletes, writeStatus.getStat().getNumDeletes());
    // verify record index stats
    // numUpdates + numDeletes - new record index updates
    assertEquals(numUpdates + numDeletes - (deletesWithIgnoreIndexUpdate - expectedNumDelegatesWithIgnoreIndexUpdate), writeStatus.getIndexStats().getWrittenRecordDelegates().size());
    int numDeletedRecordDelegates = 0;
    int numDeletedRecordDelegatesWithIgnoreIndexUpdate = 0;
    for (HoodieRecordDelegate recordDelegate : writeStatus.getIndexStats().getWrittenRecordDelegates()) {
      if (!recordDelegate.getNewLocation().isPresent()) {
        // no new location => the record was deleted by this merge
        numDeletedRecordDelegates++;
        if (recordDelegate.getIgnoreIndexUpdate()) {
          numDeletedRecordDelegatesWithIgnoreIndexUpdate++;
        }
      } else {
        // surviving records must point at the merged file slice at the new instant
        assertTrue(recordDelegate.getNewLocation().isPresent());
        assertEquals(fileId, recordDelegate.getNewLocation().get().getFileId());
        assertEquals(instantTime, recordDelegate.getNewLocation().get().getInstantTime());
      }
    }
    // 5 of the deletes are marked with ignoreIndexUpdate in generateDeleteRecords
    assertEquals(expectedNumDelegatesWithIgnoreIndexUpdate, numDeletedRecordDelegatesWithIgnoreIndexUpdate);
    assertEquals(numDeletes - (deletesWithIgnoreIndexUpdate - expectedNumDelegatesWithIgnoreIndexUpdate), numDeletedRecordDelegates);
    // verify secondary index stats
    assertEquals(1, writeStatus.getIndexStats().getSecondaryIndexStats().size());
    // 2 * numUpdates si records for old secondary keys and new secondary keys related to updates
    // numDeletes secondary keys related to deletes
    assertEquals(2 * numUpdates + numDeletes, writeStatus.getIndexStats().getSecondaryIndexStats().values().stream().findFirst().get().size());
    validateSecondaryIndexStatsContent(writeStatus, numUpdates, numDeletes);
  }

  /**
   * Simulates per-record write failures (via a mocked {@code JoinedGenericRecord}
   * construction that throws on {@code put}) and verifies that failed records are
   * reported as errors, index stats stay empty, and the CDC log written for the
   * commit does not contain the failed record.
   */
  @Test
  void testWriteFailures() throws Exception {
    // delete and recreate
    metaClient.getStorage().deleteDirectory(metaClient.getBasePath());
    Properties properties = new Properties();
    properties.put(KeyGeneratorOptions.RECORDKEY_FIELD_NAME.key(), "_row_key");
    properties.put(KeyGeneratorOptions.PARTITIONPATH_FIELD_NAME.key(), "partition_path");
    properties.put(HoodieWriteConfig.PRECOMBINE_FIELD_NAME.key(), ORDERING_FIELD);
    // enable CDC so the merge produces a CDC log file we can inspect below
    properties.put(HoodieTableConfig.CDC_ENABLED.key(), "true");
    properties.put(HoodieTableConfig.CDC_SUPPLEMENTAL_LOGGING_MODE.key(), HoodieCDCSupplementalLoggingMode.OP_KEY_ONLY.name());
    initMetaClient(getTableType(), properties);
    // init config and table
    HoodieWriteConfig config = getHoodieWriteConfigBuilder().build();
    HoodieSparkTable.create(config, new HoodieLocalEngineContext(storageConf), metaClient);
    // one round per partition
    String partitionPath = HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[0];
    // init some args
    HoodieTestDataGenerator dataGenerator = new HoodieTestDataGenerator(new String[] {partitionPath});
    SparkRDDWriteClient client = getHoodieWriteClient(config);
    String instantTime = client.startCommit();
    List<HoodieRecord> records1 = dataGenerator.generateInserts(instantTime, 100);
    JavaRDD<HoodieRecord> writeRecords = jsc.parallelize(records1, 1);
    JavaRDD<WriteStatus> statuses = client.upsert(writeRecords, instantTime);
    client.commit(instantTime, statuses, Option.empty(), COMMIT_ACTION, Collections.emptyMap(), Option.empty());
    metaClient = HoodieTableMetaClient.reload(metaClient);
    HoodieSparkCopyOnWriteTable table = (HoodieSparkCopyOnWriteTable) HoodieSparkCopyOnWriteTable.create(config, context, metaClient);
    HoodieFileGroup fileGroup = table.getFileSystemView().getAllFileGroups(partitionPath).collect(Collectors.toList()).get(0);
    String fileId = fileGroup.getFileGroupId().getFileId();
    instantTime = client.startCommit();
    List<HoodieRecord> updates = dataGenerator.generateUniqueUpdates(instantTime, 10);
    FileGroupReaderBasedMergeHandle fileGroupReaderBasedMergeHandle = new FileGroupReaderBasedMergeHandle(
        config, instantTime, table, updates.iterator(), partitionPath, fileId, new LocalTaskContextSupplier(),
        Option.empty());
    List<WriteStatus> writeStatuses;
    String recordKeyForFailure = updates.get(5).getRecordKey();
    // every JoinedGenericRecord constructed during the merge throws on put(),
    // simulating a write failure for the records that go through that path
    try (MockedConstruction<JoinedGenericRecord> mocked = mockConstruction(JoinedGenericRecord.class,
        (mock, context) -> {
          doThrow(new HoodieIOException("Simulated write failure for record key: " + recordKeyForFailure))
              .when(mock).put(any(), any());
        })) {
      fileGroupReaderBasedMergeHandle.doMerge();
    }
    writeStatuses = fileGroupReaderBasedMergeHandle.close();
    WriteStatus writeStatus = writeStatuses.get(0);
    assertEquals(2, writeStatus.getErrors().size());
    // record and secondary index stats should be EMPTY since the simulated
    // failures prevented any index updates from being recorded
    assertTrue(writeStatus.getWrittenRecordDelegates().isEmpty());
    assertTrue(writeStatus.getIndexStats().getSecondaryIndexStats().values().stream().flatMap(Collection::stream).count() == 0L);
    AtomicBoolean cdcRecordsFound = new AtomicBoolean(false);
    String cdcFilePath = metaClient.getBasePath().toString() + "/" + writeStatus.getStat().getCdcStats().keySet().stream().findFirst().get();
    Schema cdcSchema = schemaBySupplementalLoggingMode(HoodieCDCSupplementalLoggingMode.OP_KEY_ONLY, AVRO_SCHEMA);
    int recordKeyFieldIndex = cdcSchema.getField("record_key").pos();
    // scan the CDC log and ensure the failed record key was never logged
    try (HoodieLogFormat.Reader reader = HoodieLogFormat.newReader(storage, new HoodieLogFile(cdcFilePath), cdcSchema)) {
      while (reader.hasNext()) {
        HoodieLogBlock logBlock = reader.next();
        if (logBlock instanceof HoodieDataBlock) {
          cdcRecordsFound.set(true);
          try (ClosableIterator<HoodieRecord<IndexedRecord>> itr = ((HoodieDataBlock) logBlock).getRecordIterator(HoodieRecord.HoodieRecordType.AVRO)) {
            itr.forEachRemaining(record -> assertNotEquals(recordKeyForFailure, record.getData().get(recordKeyFieldIndex)));
          }
        }
      }
    }
    assertTrue(cdcRecordsFound.get(), "No CDC records were processed, validate test setup");
  }

  /**
   * Runs the insert/upsert/delete merge scenario under each supported merge mode.
   * "CUSTOM_MERGER" is a pseudo-mode that maps to CUSTOM with a custom merger impl.
   */
  @ParameterizedTest
  @ValueSource(strings = {"EVENT_TIME_ORDERING", "COMMIT_TIME_ORDERING", "CUSTOM", "CUSTOM_MERGER"})
  public void testFGReaderBasedMergeHandleInsertUpsertDelete(String mergeMode) throws IOException {
    testFGReaderBasedMergeHandleInsertUpsertDeleteInternal(mergeMode, new Properties(), false);
  }

  /**
   * Same scenario as above but with event-time watermark tracking enabled, so the
   * min/max event-time metadata on the write stat is also validated.
   */
  @Test
  public void testFGReaderBasedMergeHandleEventTimeMetadata() throws IOException {
    Properties properties = new Properties();
    properties.put("hoodie.write.track.event.time.watermark", "true");
    properties.put("hoodie.payload.event.time.field", "current_ts");
    testFGReaderBasedMergeHandleInsertUpsertDeleteInternal("EVENT_TIME_ORDERING", properties, true);
  }

  /**
   * Core scenario: an initial batch of 10 inserts, followed by a second batch that
   * mixes updates (higher/lower ordering values), deletes (same/higher/lower
   * ordering values) and 2 fresh inserts, merged via
   * {@code FileGroupReaderBasedMergeHandle}. Validates the resulting parquet
   * contents, write stats, event-time metadata, RLI delegates and SI stats
   * against the expectations computed in {@link #prepareInputFor2ndBatch}.
   */
  private void testFGReaderBasedMergeHandleInsertUpsertDeleteInternal(String mergeMode, Properties writerProps, boolean validateEventTimeMetadata) throws IOException {
    metaClient.getStorage().deleteDirectory(metaClient.getBasePath());
    HoodieWriteConfig config = getHoodieWriteConfigBuilder().withProperties(writerProps).build();
    TypedProperties properties = new TypedProperties();
    writerProps.keySet().forEach((key -> properties.put(key, writerProps.get(key))));
    properties.put(HoodieTableConfig.RECORDKEY_FIELDS.key(), "_row_key");
    properties.put(HoodieTableConfig.PARTITION_FIELDS.key(), "partition_path");
    properties.put(HoodieTableConfig.ORDERING_FIELDS.key(), ORDERING_FIELD);
    properties.put(HoodieTableConfig.RECORD_MERGE_MODE.key(), mergeMode);
    if (mergeMode.equals("CUSTOM_MERGER")) {
      // CUSTOM_MERGER is expressed as CUSTOM mode + a custom merger implementation
      config.setValue(HoodieWriteConfig.RECORD_MERGE_IMPL_CLASSES, CustomMerger.class.getName());
      properties.put(HoodieTableConfig.RECORD_MERGE_STRATEGY_ID.key(), CustomMerger.getStrategyId());
      properties.put(HoodieTableConfig.RECORD_MERGE_MODE.key(), "CUSTOM");
    }
    // pick the payload class matching the merge semantics under test
    String payloadClass = null;
    if (mergeMode.equals(RecordMergeMode.CUSTOM.name()) || mergeMode.equals("CUSTOM_MERGER")) {
      // set payload class as part of table properties.
      properties.put(HoodieTableConfig.PAYLOAD_CLASS_NAME.key(), CustomPayload.class.getName());
      payloadClass = CustomPayload.class.getName();
    } else if (mergeMode.equals(RecordMergeMode.EVENT_TIME_ORDERING.name())) {
      payloadClass = DefaultHoodieRecordPayload.class.getName();
    } else if (mergeMode.equals(RecordMergeMode.COMMIT_TIME_ORDERING.name())) {
      payloadClass = OverwriteWithLatestAvroPayload.class.getName();
    }
    initMetaClient(getTableType(), properties);
    String partitionPath = HoodieTestDataGenerator.DEFAULT_PARTITION_PATHS[0];
    HoodieTestDataGenerator dataGenerator = new HoodieTestDataGenerator(new String[] {partitionPath});
    // initial write
    List<HoodieRecord> recordsBatch1 = initialWrite(config, dataGenerator, payloadClass, partitionPath);
    Map<String, HoodieRecord> recordsBatch1Map = recordsBatch1.stream().map(record -> Pair.of(record.getRecordKey(), record))
        .collect(Collectors.toMap(pair -> pair.getKey(), pair -> pair.getValue()));
    metaClient = HoodieTableMetaClient.reload(metaClient);
    String commit1 = metaClient.getActiveTimeline().getWriteTimeline().filterCompletedInstants().getInstants().get(0).requestedTime();
    HoodieSparkCopyOnWriteTable table = (HoodieSparkCopyOnWriteTable) HoodieSparkCopyOnWriteTable.create(config, context, metaClient);
    HoodieFileGroup fileGroup = table.getFileSystemView().getAllFileGroups(partitionPath).collect(Collectors.toList()).get(0);
    String fileId = fileGroup.getFileGroupId().getFileId();
    String instantTime = "001";
    InputAndExpectedDataSet inputAndExpectedDataSet = prepareInputFor2ndBatch(config, dataGenerator, payloadClass, partitionPath, mergeMode, recordsBatch1, instantTime,
        fileGroup);
    Map<String, HoodieRecord> newInsertRecordsMap = inputAndExpectedDataSet.getNewInserts().stream().map(record -> Pair.of(record.getRecordKey(), record))
        .collect(Collectors.toMap(Pair::getKey, Pair::getValue));
    // tag non-insert records with their current (batch-1) location so the merge sees them as updates/deletes
    setCurLocation(inputAndExpectedDataSet.getRecordsToMerge().stream().filter(record -> !newInsertRecordsMap.containsKey(record.getRecordKey())).collect(Collectors.toList()),
        fileId, commit1);
    Map<String, HoodieRecord> validUpdatesRecordsMap = inputAndExpectedDataSet.getValidUpdates().stream().map(record -> Pair.of(record.getRecordKey(), record))
        .collect(Collectors.toMap(Pair::getKey, Pair::getValue));
    Map<String, HoodieRecord> validDeletesMap = inputAndExpectedDataSet.getValidDeletes();
    Map<String, HoodieRecord> untouchedRecordsFromBatch1 = recordsBatch1Map.entrySet().stream().filter(kv -> {
      return (!validUpdatesRecordsMap.containsKey(kv.getKey()) && !validDeletesMap.containsKey(kv.getKey()));
    }).collect(Collectors.toMap(Map.Entry::getKey, Map.Entry::getValue));
    FileGroupReaderBasedMergeHandle fileGroupReaderBasedMergeHandle = new FileGroupReaderBasedMergeHandle(
        config, instantTime, table, inputAndExpectedDataSet.getRecordsToMerge().iterator(), partitionPath, fileId, new LocalTaskContextSupplier(),
        Option.empty());
    fileGroupReaderBasedMergeHandle.doMerge();
    List<WriteStatus> writeStatuses = fileGroupReaderBasedMergeHandle.close();
    WriteStatus writeStatus = writeStatuses.get(0);
    // read the file and validate values.
    String filePath = writeStatus.getStat().getPath();
    String fullPath = metaClient.getBasePath() + "/" + filePath;
    List<GenericRecord> actualRecords = new ParquetUtils().readAvroRecords(metaClient.getStorage(), new StoragePath(fullPath));
    Map<String, GenericRecord> actualRecordsMap = actualRecords.stream()
        .map(genRec -> Pair.of(genRec.get("_row_key"), genRec))
        .collect(Collectors.toMap(pair -> pair.getKey().toString(), pair -> pair.getValue()));
    // every expected record must be present with the expected ordering value
    for (Map.Entry<String, HoodieRecord> entry : inputAndExpectedDataSet.getExpectedRecordsMap().entrySet()) {
      assertTrue(actualRecordsMap.containsKey(entry.getKey()));
      GenericRecord genericRecord = (GenericRecord) ((HoodieRecordPayload) entry.getValue().getData()).getInsertValue(AVRO_SCHEMA, properties).get();
      assertEquals(genericRecord.get(ORDERING_FIELD).toString(), actualRecordsMap.get(entry.getKey()).get(ORDERING_FIELD).toString());
    }
    // validate that deleted records are not part of actual list
    inputAndExpectedDataSet.getValidDeletes().keySet().forEach(deletedKey -> {
      assertTrue(!actualRecordsMap.containsKey(deletedKey));
    });
    HoodieWriteStat stat = writeStatus.getStat();
    assertEquals(inputAndExpectedDataSet.getExpectedUpdates(), stat.getNumUpdateWrites());
    assertEquals(inputAndExpectedDataSet.getExpectedDeletes(), stat.getNumDeletes());
    assertEquals(2, stat.getNumInserts());
    // 10 batch-1 records minus deletes, plus the 2 new inserts
    validateWriteStatus(writeStatus, commit1, 10 - inputAndExpectedDataSet.getExpectedDeletes() + 2,
        inputAndExpectedDataSet.getExpectedUpdates(), 2, inputAndExpectedDataSet.getExpectedDeletes());
    // validate event time metadata if enabled
    if (validateEventTimeMetadata) {
      List<HoodieRecord> records = new ArrayList<>(inputAndExpectedDataSet.getExpectedRecordsMap().values());
      validateEventTimeMetadata(writeStatus, writerProps.get("hoodie.payload.event.time.field").toString(), AVRO_SCHEMA, config, properties, records);
    } else {
      validateEventTimeMetadataNotSet(writeStatus);
    }
    // validate RLI stats
    List<HoodieRecordDelegate> recordDelegates = writeStatus.getIndexStats().getWrittenRecordDelegates();
    recordDelegates.forEach(recordDelegate -> {
      if (recordDelegate.getNewLocation().isPresent() && recordDelegate.getCurrentLocation().isPresent()) {
        // updates
        // inserts are also tagged as updates. To be fixed.
        assertTrue(validUpdatesRecordsMap.containsKey(recordDelegate.getRecordKey()) || untouchedRecordsFromBatch1.containsKey(recordDelegate.getRecordKey()));
      } else if (recordDelegate.getNewLocation().isPresent() && recordDelegate.getCurrentLocation().isEmpty()) {
        // inserts
        assertTrue(newInsertRecordsMap.containsKey(recordDelegate.getRecordKey()));
      } else if (recordDelegate.getCurrentLocation().isPresent() && recordDelegate.getNewLocation().isEmpty()) {
        // deletes
        assertTrue(validDeletesMap.containsKey(recordDelegate.getRecordKey()));
      }
    });
    // validate SI stats.
    assertEquals(1, writeStatus.getIndexStats().getSecondaryIndexStats().size());
    // deletes contribute 1 SI entry each, updates 2 (old key removal + new key), inserts 1
    assertEquals(inputAndExpectedDataSet.expectedDeletes + 2 * inputAndExpectedDataSet.expectedUpdates + inputAndExpectedDataSet.newInserts.size(),
        writeStatus.getIndexStats().getSecondaryIndexStats().get("secondary_index_sec-rider").size());
    for (SecondaryIndexStats secondaryIndexStat : writeStatus.getIndexStats().getSecondaryIndexStats().get("secondary_index_sec-rider")) {
      if (secondaryIndexStat.isDeleted()) {
        // Either the record is deleted or record is updated. For updated record there are two SI entries
        // one for older SI record deletion and another for new SI record creation
        assertTrue(inputAndExpectedDataSet.validDeletes.containsKey(secondaryIndexStat.getRecordKey())
            || inputAndExpectedDataSet.getValidUpdates().stream().anyMatch(rec -> rec.getRecordKey().equals(secondaryIndexStat.getRecordKey())));
      } else {
        HoodieRecord record = inputAndExpectedDataSet.expectedRecordsMap.get(secondaryIndexStat.getRecordKey());
        assertEquals(record.getColumnValueAsJava(AVRO_SCHEMA, "rider", properties).toString(),
            secondaryIndexStat.getSecondaryKeyValue().toString());
      }
    }
  }

  /**
   * Asserts that no event-time watermark metadata was recorded on the write stat.
   *
   * <p>NOTE(review): {@code assertNull} here resolves to the statically imported
   * {@code org.junit.jupiter.api.AssertionsKt.assertNull} (JUnit's Kotlin support
   * class); consider {@code org.junit.jupiter.api.Assertions.assertNull} for Java.
   */
  private void validateEventTimeMetadataNotSet(WriteStatus writeStatus) {
    assertNull(writeStatus.getStat().getMinEventTime());
    assertNull(writeStatus.getStat().getMaxEventTime());
  }

  /**
   * Recomputes the expected min/max event times from the expected records and
   * compares them with the values recorded on the write stat.
   */
  private void validateEventTimeMetadata(WriteStatus writeStatus, String eventTimeFieldName, Schema schema, HoodieWriteConfig config,
                                         TypedProperties props, List<HoodieRecord> records) {
    long actualMinEventTime = writeStatus.getStat().getMinEventTime();
    long actualMaxEventTime = writeStatus.getStat().getMaxEventTime();
    boolean keepConsistentLogicalTimestamp = ConfigUtils.shouldKeepConsistentLogicalTimestamp(config.getProps());
    AtomicLong expectedMinValue = new AtomicLong(Long.MAX_VALUE);
    AtomicLong expectedMaxValue = new AtomicLong(Long.MIN_VALUE);
    // Append event_time.
    records.forEach(record -> {
      Object eventTimeValue = record.getColumnValueAsJava(schema, eventTimeFieldName, props);
      if (eventTimeValue != null) {
        // Append event_time.
        Option<Schema.Field> field = AvroSchemaUtils.findNestedField(schema, eventTimeFieldName);
        // Field should definitely exist.
        eventTimeValue = record.convertColumnValueForLogicalType(
            field.get().schema(), eventTimeValue, keepConsistentLogicalTimestamp);
        // 10 digits => seconds, 13 digits => millis
        int length = eventTimeValue.toString().length();
        Long millisEventTime = null;
        if (length == 10) {
          millisEventTime = Long.parseLong(eventTimeValue.toString()) * 1000;
        } else if (length == 13) {
          // eventTimeVal in millis unit
          millisEventTime = Long.parseLong(eventTimeValue.toString());
        }
        // NOTE(review): millisEventTime stays null (and NPEs below) if the value is
        // neither 10 nor 13 chars — assumes generated data always matches; confirm.
        long eventTime = DateTimeUtils.parseDateTime(Long.toString(millisEventTime)).toEpochMilli();
        expectedMinValue.set(Math.min(expectedMinValue.get(), eventTime));
        expectedMaxValue.set(Math.max(expectedMaxValue.get(), eventTime));
      }
    });
    assertEquals(expectedMinValue.get(), actualMinEventTime, "Min event time does not match");
    assertEquals(expectedMaxValue.get(), actualMaxEventTime, "Max event time does not match");
  }

  /**
   * Writes an initial batch of 10 inserts (ordering value forced to 5) and commits
   * it, returning the records written.
   */
  private List<HoodieRecord> initialWrite(HoodieWriteConfig config, HoodieTestDataGenerator dataGenerator, String payloadClass, String partitionPath) {
    List<HoodieRecord> insertRecords = null;
    try (SparkRDDWriteClient client = getHoodieWriteClient(config)) {
      String instantTime = client.startCommit();
      insertRecords = dataGenerator.generateInserts(instantTime, 10);
      insertRecords = overrideOrderingValue(insertRecords, config, payloadClass, partitionPath, 5L);
      JavaRDD<HoodieRecord> writeRecords = jsc.parallelize(insertRecords, 1);
      JavaRDD<WriteStatus> statuses = client.upsert(writeRecords, instantTime);
      client.commit(instantTime, statuses, Option.empty(), COMMIT_ACTION, Collections.emptyMap(), Option.empty());
    }
    return insertRecords;
  }

  /**
   * Builds the second batch (updates, deletes, inserts) and computes, per merge
   * mode, which of them should actually take effect against batch-1 records that
   * all carry ordering value 5:
   * <ul>
   *   <li>EVENT_TIME_ORDERING: only same/higher ordering values win;</li>
   *   <li>COMMIT_TIME_ORDERING: everything in the new batch wins;</li>
   *   <li>CUSTOM / CUSTOM_MERGER: the merger picks the LOWER ordering value.</li>
   * </ul>
   */
  private InputAndExpectedDataSet prepareInputFor2ndBatch(HoodieWriteConfig config, HoodieTestDataGenerator dataGenerator, String payloadClass,
                                                          String partitionPath, String mergeMode, List<HoodieRecord> recordsBatch1,
                                                          String instantTime, HoodieFileGroup fileGroup) {
    List<HoodieRecord> recordsToDelete = new ArrayList<>();
    Map<String, HoodieRecord> validDeletes = new HashMap<>();
    List<GenericRecord> recordsToUpdate = new ArrayList<>();
    List<HoodieRecord> validUpdates = new ArrayList<>();
    List<HoodieRecord> newInserts = new ArrayList<>();
    int expectedUpdates = 0;
    int expectedDeletes = 0;
    // Generate records to delete
    List<HoodieRecord> newRecords = dataGenerator.generateUniqueUpdates(instantTime, 5);
    HoodieRecord deleteRecordSameOrderingValue = generateDeletes(Collections.singletonList(newRecords.get(2)), config, payloadClass, partitionPath, 10L).get(0);
    HoodieRecord deleteRecordHigherOrderingValue = generateDeletes(Collections.singletonList(newRecords.get(3)), config, payloadClass, partitionPath, 20L).get(0);
    HoodieRecord deleteRecordLowerOrderingValue = generateDeletes(Collections.singletonList(newRecords.get(4)), config, payloadClass, partitionPath, 2L).get(0);
    recordsToDelete.add(deleteRecordSameOrderingValue);
    recordsToDelete.add(deleteRecordLowerOrderingValue);
    recordsToDelete.add(deleteRecordHigherOrderingValue);
    // Custom merger chooses record with lower ordering value
    if (!mergeMode.equals("CUSTOM_MERGER") && !mergeMode.equals("CUSTOM")) {
      validDeletes.put(deleteRecordSameOrderingValue.getRecordKey(), deleteRecordSameOrderingValue);
      validDeletes.put(deleteRecordHigherOrderingValue.getRecordKey(), deleteRecordHigherOrderingValue);
      expectedDeletes += 2;
    }
    if (!mergeMode.equals(RecordMergeMode.EVENT_TIME_ORDERING.name())) {
      validDeletes.put(deleteRecordLowerOrderingValue.getRecordKey(), deleteRecordLowerOrderingValue);
      expectedDeletes += 1;
    }
    // Generate records to update
    GenericRecord genericRecord1 = (GenericRecord) ((SerializableIndexedRecord) newRecords.get(0).getData()).getData();
    GenericRecord genericRecord2 = (GenericRecord) ((SerializableIndexedRecord) newRecords.get(1).getData()).getData();
    genericRecord1.put(ORDERING_FIELD, 20L);
    genericRecord2.put(ORDERING_FIELD, 2L);
    recordsToUpdate.add(genericRecord1);
    recordsToUpdate.add(genericRecord2);
    List<HoodieRecord> hoodieRecordsToUpdate = getHoodieRecords(payloadClass, recordsToUpdate, partitionPath, false);
    if (!mergeMode.equals("CUSTOM_MERGER") && !mergeMode.equals("CUSTOM")) {
      // Custom merger chooses record with lower ordering value
      validUpdates.add(hoodieRecordsToUpdate.get(0));
      expectedUpdates += 1;
    }
    if (!mergeMode.equals(RecordMergeMode.EVENT_TIME_ORDERING.name())) {
      validUpdates.add(hoodieRecordsToUpdate.get(1));
      expectedUpdates += 1;
    }
    List<HoodieRecord> recordsToMerge = hoodieRecordsToUpdate;
    recordsToMerge.addAll(recordsToDelete);
    // Generate records to insert
    List<HoodieRecord> recordsToInsert2 = dataGenerator.generateInserts(instantTime, 2);
    recordsToInsert2 = overrideOrderingValue(recordsToInsert2, config, payloadClass, partitionPath, 15L);
    recordsToMerge.addAll(recordsToInsert2);
    newInserts.addAll(recordsToInsert2);
    // let's compute the expected record list
    Map<String, HoodieRecord> expectedRecordsMap = new HashMap<>();
    validUpdates.forEach(rec -> {
      expectedRecordsMap.put(rec.getRecordKey(), rec);
    });
    recordsBatch1.forEach(record -> {
      // if not part of new update, if not valid delete, add records from 1st batch.
      String recKey = record.getRecordKey();
      if (!expectedRecordsMap.containsKey(recKey) && !validDeletes.containsKey(recKey)) {
        expectedRecordsMap.put(recKey, record);
      }
    });
    // add new inserts.
    newInserts.forEach(record -> {
      expectedRecordsMap.put(record.getRecordKey(), record);
    });
    return new InputAndExpectedDataSet(expectedRecordsMap, expectedUpdates, expectedDeletes, recordsToMerge, newInserts, validUpdates, validDeletes);
  }

  /**
   * Shared write config for all tests: metadata table enabled with record index,
   * streaming writes and a secondary index "sec-rider" on the "rider" column.
   */
  HoodieWriteConfig.Builder getHoodieWriteConfigBuilder() {
    return getConfigBuilder(basePath)
        .withPopulateMetaFields(true)
        .withFileSystemViewConfig(FileSystemViewStorageConfig.newBuilder().withRemoteServerPort(timelineServicePort).build())
        .withMetadataConfig(HoodieMetadataConfig.newBuilder()
            .enable(true)
            .withEnableRecordIndex(true)
            .withStreamingWriteEnabled(true)
            .withSecondaryIndexEnabled(true)
            .withSecondaryIndexName("sec-rider")
            .withSecondaryIndexForColumn("rider")
            .build())
        .withKeyGenerator(KeyGeneratorForDataGeneratorRecords.class.getCanonicalName())
        .withSchema(TRIP_EXAMPLE_SCHEMA);
  }

  /** Rewrites the ordering field of every record to {@code orderingValue}. */
  private List<HoodieRecord> overrideOrderingValue(List<HoodieRecord> hoodieRecords, HoodieWriteConfig config, String payloadClass, String partitionPath, long orderingValue) {
    List<GenericRecord> genericRecords = hoodieRecords.stream().map(insertRecord -> {
      GenericRecord genericRecord = (GenericRecord) ((SerializableIndexedRecord) insertRecord.getData()).getData();
      genericRecord.put(ORDERING_FIELD, orderingValue);
      return genericRecord;
    }).collect(Collectors.toList());
    return getHoodieRecords(payloadClass, genericRecords, partitionPath, false);
  }

  /** Converts records into delete records with the given ordering value. */
  private List<HoodieRecord> generateDeletes(List<HoodieRecord> hoodieRecords, HoodieWriteConfig config, String payloadClass, String partitionPath, long orderingValue) {
    List<GenericRecord> genericRecords = hoodieRecords.stream().map(deleteRecord -> {
      GenericRecord genericRecord = (GenericRecord) ((SerializableIndexedRecord) deleteRecord.getData()).getData();
      genericRecord.put(ORDERING_FIELD, orderingValue);
      // mark the avro record itself as a delete
      genericRecord.put(HoodieRecord.HOODIE_IS_DELETED_FIELD, true);
      return genericRecord;
    }).collect(Collectors.toList());
    return getHoodieRecords(payloadClass, genericRecords, partitionPath, true);
  }

  /** Wraps avro records into {@code HoodieAvroRecord}s with the given payload class. */
  private List<HoodieRecord> getHoodieRecords(String payloadClass, List<GenericRecord> genericRecords, String partitionPath,
                                              boolean isDelete) {
    return genericRecords.stream().map(genericRecord -> {
      return (HoodieRecord) new HoodieAvroRecord<>(new HoodieKey(genericRecord.get("_row_key").toString(), partitionPath),
          HoodieRecordUtils.loadPayload(payloadClass, genericRecord, (Comparable) genericRecord.get(ORDERING_FIELD)), null,
          (Comparable) genericRecord.get(ORDERING_FIELD), isDelete);
    }).collect(Collectors.toList());
  }

  /** Tags every record with its current location (instant + file id). */
  private void setCurLocation(List<HoodieRecord> records, String fileId, String instantTime) {
    records.forEach(record -> record.setCurrentLocation(new HoodieRecordLocation(instantTime, fileId)));
  }

  /**
   * Asserts the common shape of a COW merge write stat: prev commit, file path,
   * positive sizes, no log activity, and the expected per-kind record counts.
   */
  private static void validateWriteStatus(WriteStatus writeStatus, String previousCommit, long expectedTotalRecordsWritten, long expectedTotalUpdatedRecords,
                                          long expectedTotalInsertedRecords, long expectedTotalDeletedRecords) {
    HoodieWriteStat writeStat = writeStatus.getStat();
    assertEquals(previousCommit, writeStat.getPrevCommit());
    assertNotNull(writeStat.getFileId());
    assertNotNull(writeStat.getPath());
    assertTrue(writeStat.getFileSizeInBytes() > 0);
    assertTrue(writeStat.getTotalWriteBytes() > 0);
    // COW merge produces no log files
    assertTrue(writeStat.getTotalLogBlocks() == 0);
    assertTrue(writeStat.getTotalLogSizeCompacted() == 0);
    assertTrue(writeStat.getTotalLogFilesCompacted() == 0);
    assertTrue(writeStat.getTotalLogRecords() == 0);
    assertEquals(expectedTotalRecordsWritten, writeStat.getNumWrites());
    assertEquals(expectedTotalUpdatedRecords, writeStat.getNumUpdateWrites());
    assertEquals(expectedTotalInsertedRecords, writeStat.getNumInserts());
    assertEquals(expectedTotalDeletedRecords, writeStat.getNumDeletes());
  }

  /**
   * Value holder bundling the input records for the second batch together with the
   * per-merge-mode expectations computed in {@link #prepareInputFor2ndBatch}.
   *
   * <p>NOTE(review): could be declared {@code static} — it does not use the
   * enclosing test instance.
   */
  class InputAndExpectedDataSet {
    // record key -> record expected to survive the merge
    private final Map<String, HoodieRecord> expectedRecordsMap;
    // number of updates expected to take effect under the merge mode
    private final int expectedUpdates;
    // number of deletes expected to take effect under the merge mode
    private final int expectedDeletes;
    // full input batch handed to the merge handle
    private final List<HoodieRecord> recordsToMerge;
    private final List<HoodieRecord> newInserts;
    private final List<HoodieRecord> validUpdates;
    private final Map<String, HoodieRecord> validDeletes;

    public InputAndExpectedDataSet(Map<String, HoodieRecord> expectedRecordsMap, int expectedUpdates, int expectedDeletes,
                                   List<HoodieRecord> recordsToMerge, List<HoodieRecord> newInserts, List<HoodieRecord> validUpdates,
                                   Map<String, HoodieRecord> validDeletes) {
      this.expectedRecordsMap = expectedRecordsMap;
      this.expectedUpdates = expectedUpdates;
      this.expectedDeletes = expectedDeletes;
      this.recordsToMerge = recordsToMerge;
      this.validUpdates = validUpdates;
      this.newInserts = newInserts;
      this.validDeletes = validDeletes;
    }

    public Map<String, HoodieRecord> getExpectedRecordsMap() {
      return expectedRecordsMap;
    }

    public int getExpectedUpdates() {
      return expectedUpdates;
    }

    public int getExpectedDeletes() {
      return expectedDeletes;
    }

    public List<HoodieRecord> getRecordsToMerge() {
      return recordsToMerge;
    }

    public List<HoodieRecord> getNewInserts() {
      return newInserts;
    }

    public List<HoodieRecord> getValidUpdates() {
      return validUpdates;
    }

    public Map<String, HoodieRecord> getValidDeletes() {
      return validDeletes;
    }
  }
}