repo_id
stringclasses
875 values
size
int64
974
38.9k
file_path
stringlengths
10
308
content
stringlengths
974
38.9k
apache/cxf
36,026
rt/rs/security/sso/saml/src/test/java/org/apache/cxf/rs/security/saml/sso/SAMLResponseValidatorTest.java
/**
 * Licensed to the Apache Software Foundation (ASF) under one
 * or more contributor license agreements. See the NOTICE file
 * distributed with this work for additional information
 * regarding copyright ownership. The ASF licenses this file
 * to you under the Apache License, Version 2.0 (the
 * "License"); you may not use this file except in compliance
 * with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing,
 * software distributed under the License is distributed on an
 * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
 * KIND, either express or implied. See the License for the
 * specific language governing permissions and limitations
 * under the License.
 */
package org.apache.cxf.rs.security.saml.sso;

import java.io.InputStream;
import java.security.KeyStore;
import java.security.PrivateKey;
import java.security.cert.X509Certificate;
import java.time.Duration;
import java.time.Instant;
import java.util.Collections;
import java.util.List;

import org.w3c.dom.Document;
import org.w3c.dom.Element;

import org.apache.cxf.helpers.DOMUtils;
import org.apache.wss4j.common.crypto.Crypto;
import org.apache.wss4j.common.crypto.CryptoType;
import org.apache.wss4j.common.crypto.Merlin;
import org.apache.wss4j.common.ext.WSSecurityException;
import org.apache.wss4j.common.saml.OpenSAMLUtil;
import org.apache.wss4j.common.saml.SAMLCallback;
import org.apache.wss4j.common.saml.SAMLUtil;
import org.apache.wss4j.common.saml.SamlAssertionWrapper;
import org.apache.wss4j.common.saml.bean.AudienceRestrictionBean;
import org.apache.wss4j.common.saml.bean.ConditionsBean;
import org.apache.wss4j.common.saml.bean.SubjectConfirmationDataBean;
import org.apache.wss4j.common.saml.builder.SAML2Constants;
import org.apache.wss4j.common.util.Loader;
import org.apache.wss4j.dom.engine.WSSConfig;
import org.opensaml.saml.common.SAMLVersion;
import org.opensaml.saml.common.SignableSAMLObject;
import org.opensaml.saml.common.xml.SAMLConstants;
import org.opensaml.saml.saml2.core.Response;
import org.opensaml.saml.saml2.core.Status;
import org.opensaml.security.x509.BasicX509Credential;
import org.opensaml.xmlsec.keyinfo.impl.X509KeyInfoGeneratorFactory;
import org.opensaml.xmlsec.signature.KeyInfo;
import org.opensaml.xmlsec.signature.Signature;
import org.opensaml.xmlsec.signature.support.SignatureConstants;

import static org.junit.Assert.assertEquals;
import static org.junit.Assert.assertNotNull;
import static org.junit.Assert.fail;

/**
 * Some unit tests for the SAMLProtocolResponseValidator.
 *
 * Each test builds a SAML 2.0 protocol Response (optionally with a signed
 * Assertion or a signed Response element), round-trips it through DOM, and
 * checks that SAMLProtocolResponseValidator accepts valid responses and
 * rejects invalid ones (bad status codes, tampered signatures, future
 * IssueInstant / AuthnInstant values, stale SessionNotOnOrAfter, etc.).
 */
public class SAMLResponseValidatorTest {

    static {
        // Initialize the WSS4J and OpenSAML engines once for all tests.
        WSSConfig.init();
        OpenSAMLUtil.initSamlEngine();
    }

    /** A well-formed, unsigned Response with a Success status must validate. */
    @org.junit.Test
    public void testCreateAndValidateResponse() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML2_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        response.getAssertions().add(assertion.getSaml2());

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        validator.validateSamlResponse(marshalledResponse, null, null);
    }

    /** A SAML 1.1 status code in a SAML 2.0 Response must be rejected. */
    @org.junit.Test
    public void testInvalidStatusCode() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML1_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        response.getAssertions().add(assertion.getSaml2());

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        try {
            validator.validateSamlResponse(marshalledResponse, null, null);
            fail("Expected failure on an invalid SAML code");
        } catch (WSSecurityException ex) {
            // expected
        }
    }

    /** A non-Success (RequestDenied) status code must be rejected. */
    @org.junit.Test
    public void testRequestDeniedStatusCode() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                "urn:oasis:names:tc:SAML:2.0:status:RequestDenied", null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        response.getAssertions().add(assertion.getSaml2());

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        try {
            validator.validateSamlResponse(marshalledResponse, null, null);
            fail("Expected failure on an invalid SAML code");
        } catch (WSSecurityException ex) {
            // expected
        }
    }

    /**
     * A Response containing a signed Assertion validates with the correct
     * signature Crypto, and fails when no signature Crypto is supplied.
     */
    @org.junit.Test
    public void testResponseSignedAssertion() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML2_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        // Sign the assertion with the "alice" key from the test keystore
        Crypto issuerCrypto = new Merlin();
        KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
        ClassLoader loader = Loader.getClassLoader(SAMLResponseValidatorTest.class);
        InputStream input = Merlin.loadInputStream(loader, "alice.jks");
        keyStore.load(input, "password".toCharArray());
        ((Merlin)issuerCrypto).setKeyStore(keyStore);

        assertion.signAssertion("alice", "password", issuerCrypto, false);

        response.getAssertions().add(assertion.getSaml2());

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        try {
            validator.validateSamlResponse(marshalledResponse, null, new KeystorePasswordCallback());
            fail("Expected failure on no Signature Crypto");
        } catch (WSSecurityException ex) {
            // expected
        }

        // Validate the Response
        validator.validateSamlResponse(
            marshalledResponse, issuerCrypto, new KeystorePasswordCallback()
        );
    }

    /**
     * Modifying the IssueInstant of a signed Assertion after signing must
     * break the signature and cause validation to fail.
     */
    @org.junit.Test
    public void testResponseModifiedSignedAssertion() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML2_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        Crypto issuerCrypto = new Merlin();
        KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
        ClassLoader loader = Loader.getClassLoader(SAMLResponseValidatorTest.class);
        InputStream input = Merlin.loadInputStream(loader, "alice.jks");
        keyStore.load(input, "password".toCharArray());
        ((Merlin)issuerCrypto).setKeyStore(keyStore);

        assertion.signAssertion("alice", "password", issuerCrypto, false);

        response.getAssertions().add(assertion.getSaml2());

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        List<Element> assertions =
            DOMUtils.findAllElementsByTagNameNS(policyElement, SAMLConstants.SAML20_NS, "Assertion");
        assertNotNull(assertions);
        assertEquals(1, assertions.size());

        // Sleep so the new IssueInstant is guaranteed to differ from the signed one
        Thread.sleep(1000L);
        Instant issueInstant = Instant.now();
        assertions.get(0).setAttributeNS(null, "IssueInstant", issueInstant.toString());

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        try {
            // Validate the Response
            validator.validateSamlResponse(
                marshalledResponse, issuerCrypto, new KeystorePasswordCallback()
            );
            fail("Expected failure on a bad signature");
        } catch (WSSecurityException ex) {
            // expected
        }
    }

    /**
     * A Response signed at the protocol level (with KeyInfo) validates with
     * the signature Crypto and fails without one.
     */
    @org.junit.Test
    public void testSignedResponse() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML2_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        Crypto issuerCrypto = new Merlin();
        KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
        ClassLoader loader = Loader.getClassLoader(SAMLResponseValidatorTest.class);
        InputStream input = Merlin.loadInputStream(loader, "alice.jks");
        keyStore.load(input, "password".toCharArray());
        ((Merlin)issuerCrypto).setKeyStore(keyStore);

        response.getAssertions().add(assertion.getSaml2());
        signResponse(response, "alice", "password", issuerCrypto, true);

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        try {
            validator.validateSamlResponse(marshalledResponse, null, new KeystorePasswordCallback());
            fail("Expected failure on no Signature Crypto");
        } catch (WSSecurityException ex) {
            // expected
        }

        // Validate the Response
        validator.validateSamlResponse(
            marshalledResponse, issuerCrypto, new KeystorePasswordCallback()
        );
    }

    /**
     * Modifying the IssueInstant of a signed Response after signing must
     * break the signature and cause validation to fail.
     */
    @org.junit.Test
    public void testModifiedSignedResponse() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML2_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        Crypto issuerCrypto = new Merlin();
        KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
        ClassLoader loader = Loader.getClassLoader(SAMLResponseValidatorTest.class);
        InputStream input = Merlin.loadInputStream(loader, "alice.jks");
        keyStore.load(input, "password".toCharArray());
        ((Merlin)issuerCrypto).setKeyStore(keyStore);

        response.getAssertions().add(assertion.getSaml2());
        signResponse(response, "alice", "password", issuerCrypto, true);

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        // Sleep so the new IssueInstant is guaranteed to differ from the signed one
        Thread.sleep(1000L);
        Instant issueInstant = Instant.now();
        policyElement.setAttributeNS(null, "IssueInstant", issueInstant.toString());

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        try {
            // Validate the Response
            validator.validateSamlResponse(
                marshalledResponse, issuerCrypto, new KeystorePasswordCallback()
            );
            fail("Expected failure on a bad signature");
        } catch (WSSecurityException ex) {
            // expected
        }
    }

    /**
     * A Response signed without a KeyInfo element validates when the
     * validator is configured not to require KeyInfo and the Crypto has a
     * default X.509 identifier to resolve the signing cert.
     */
    @org.junit.Test
    public void testSignedResponseNoKeyInfo() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML2_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        Crypto issuerCrypto = new Merlin();
        KeyStore keyStore = KeyStore.getInstance(KeyStore.getDefaultType());
        ClassLoader loader = Loader.getClassLoader(SAMLResponseValidatorTest.class);
        InputStream input = Merlin.loadInputStream(loader, "alice.jks");
        keyStore.load(input, "password".toCharArray());
        ((Merlin)issuerCrypto).setKeyStore(keyStore);
        // Without KeyInfo the validator must fall back to this default identifier
        issuerCrypto.setDefaultX509Identifier("alice");

        response.getAssertions().add(assertion.getSaml2());
        signResponse(response, "alice", "password", issuerCrypto, false);

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        validator.setKeyInfoMustBeAvailable(false);
        try {
            validator.validateSamlResponse(marshalledResponse, null, new KeystorePasswordCallback());
            fail("Expected failure on no Signature Crypto");
        } catch (WSSecurityException ex) {
            // expected
        }

        // Validate the Response
        validator.validateSamlResponse(
            marshalledResponse, issuerCrypto, new KeystorePasswordCallback()
        );
    }

    /** A Response carrying SAML version 1.0 must be rejected. */
    @org.junit.Test
    public void testResponseInvalidVersion() throws Exception {
        SubjectConfirmationDataBean subjectConfirmationData = new SubjectConfirmationDataBean();
        subjectConfirmationData.setAddress("http://apache.org");
        subjectConfirmationData.setInResponseTo("12345");
        subjectConfirmationData.setNotAfter(Instant.now().plus(Duration.ofMinutes(5)));
        subjectConfirmationData.setRecipient("http://recipient.apache.org");

        // Create a AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);
        callbackHandler.setSubjectConfirmationData(subjectConfirmationData);

        ConditionsBean conditions = new ConditionsBean();
        conditions.setNotBefore(Instant.now());
        conditions.setNotAfter(Instant.now().plus(Duration.ofMinutes(5)));

        AudienceRestrictionBean audienceRestriction = new AudienceRestrictionBean();
        audienceRestriction.setAudienceURIs(Collections.singletonList("http://service.apache.org"));
        conditions.setAudienceRestrictions(Collections.singletonList(audienceRestriction));
        callbackHandler.setConditions(conditions);

        Response response = createResponse(subjectConfirmationData, callbackHandler);
        response.setVersion(SAMLVersion.VERSION_10);

        // Validate the Response
        SAMLProtocolResponseValidator protocolValidator = new SAMLProtocolResponseValidator();
        try {
            protocolValidator.validateSamlResponse(response, null, null);
            fail("Expected failure on bad response");
        } catch (WSSecurityException ex) {
            // expected
        }
    }

    /** An unknown SubjectConfirmation method ("xyz") must be rejected. */
    @org.junit.Test
    public void testAssertionBadSubjectConfirmationMethod() throws Exception {
        SubjectConfirmationDataBean subjectConfirmationData = new SubjectConfirmationDataBean();
        subjectConfirmationData.setAddress("http://apache.org");
        subjectConfirmationData.setInResponseTo("12345");
        subjectConfirmationData.setNotAfter(Instant.now().plus(Duration.ofMinutes(5)));
        subjectConfirmationData.setRecipient("http://recipient.apache.org");

        // Create a AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod("xyz");
        callbackHandler.setSubjectConfirmationData(subjectConfirmationData);

        ConditionsBean conditions = new ConditionsBean();
        conditions.setNotBefore(Instant.now());
        conditions.setNotAfter(Instant.now().plus(Duration.ofMinutes(5)));

        AudienceRestrictionBean audienceRestriction = new AudienceRestrictionBean();
        audienceRestriction.setAudienceURIs(Collections.singletonList("http://service.apache.org"));
        conditions.setAudienceRestrictions(Collections.singletonList(audienceRestriction));
        callbackHandler.setConditions(conditions);

        Response response = createResponse(subjectConfirmationData, callbackHandler);

        // Validate the Response
        SAMLProtocolResponseValidator protocolValidator = new SAMLProtocolResponseValidator();
        try {
            protocolValidator.validateSamlResponse(response, null, null);
            fail("Expected failure on bad response");
        } catch (WSSecurityException ex) {
            // expected
        }
    }

    /** A Response IssueInstant in the future must be rejected. */
    @org.junit.Test
    public void testResponseIssueInstant() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML2_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );
        response.setIssueInstant(Instant.now().plus(Duration.ofMinutes(5)));

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        response.getAssertions().add(assertion.getSaml2());

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        try {
            validator.validateSamlResponse(marshalledResponse, null, null);
            fail("Expected failure on an invalid Response IssueInstant");
        } catch (WSSecurityException ex) {
            // expected
        }
    }

    /** An Assertion IssueInstant in the future must be rejected. */
    @org.junit.Test
    public void testAssertionIssueInstant() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML2_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        assertion.getSaml2().setIssueInstant(Instant.now().plus(Duration.ofMinutes(5)));

        response.getAssertions().add(assertion.getSaml2());

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        try {
            validator.validateSamlResponse(marshalledResponse, null, null);
            fail("Expected failure on an invalid Assertion IssueInstant");
        } catch (WSSecurityException ex) {
            // expected
        }
    }

    /** An AuthnInstant in the future must be rejected. */
    @org.junit.Test
    public void testFutureAuthnInstant() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML2_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);
        callbackHandler.setAuthnInstant(Instant.now().plus(Duration.ofDays(1)));

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        response.getAssertions().add(assertion.getSaml2());

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        try {
            validator.validateSamlResponse(marshalledResponse, null, null);
            fail("Expected failure on an invalid Assertion AuthnInstant");
        } catch (WSSecurityException ex) {
            // expected
        }
    }

    /** A SessionNotOnOrAfter in the past must be rejected. */
    @org.junit.Test
    public void testStaleSessionNotOnOrAfter() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML2_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);
        callbackHandler.setSessionNotOnOrAfter(Instant.now().minus(Duration.ofDays(1)));

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        response.getAssertions().add(assertion.getSaml2());

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        try {
            validator.validateSamlResponse(marshalledResponse, null, null);
            fail("Expected failure on an invalid SessionNotOnOrAfter");
        } catch (WSSecurityException ex) {
            // expected
        }
    }

    /** A malformed SubjectLocality address must be rejected. */
    @org.junit.Test
    public void testInvalidSubjectLocality() throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML2_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAML2CallbackHandler callbackHandler = new SAML2CallbackHandler();
        callbackHandler.setStatement(SAML2CallbackHandler.Statement.AUTHN);
        callbackHandler.setIssuer("http://cxf.apache.org/issuer");
        callbackHandler.setConfirmationMethod(SAML2Constants.CONF_SENDER_VOUCHES);
        callbackHandler.setSubjectLocality("xyz.123", null);

        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        response.getAssertions().add(assertion.getSaml2());

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        Response marshalledResponse = (Response)OpenSAMLUtil.fromDom(policyElement);

        // Validate the Response
        SAMLProtocolResponseValidator validator = new SAMLProtocolResponseValidator();
        try {
            validator.validateSamlResponse(marshalledResponse, null, null);
            // Fixed copy-paste failure message (was "invalid SessionNotOnOrAfter")
            fail("Expected failure on an invalid SubjectLocality");
        } catch (WSSecurityException ex) {
            // expected
        }
    }

    /**
     * Sign a SAML Response at the protocol level.
     *
     * @param response the Response to sign (modified in place)
     * @param issuerKeyName keystore alias of the signing key
     * @param issuerKeyPassword password for the signing key
     * @param issuerCrypto Crypto instance holding the signing key/cert
     * @param useKeyInfo whether to embed the signing cert in a KeyInfo element
     * @throws Exception if no issuer cert is found or KeyInfo generation fails
     */
    private void signResponse(
        Response response,
        String issuerKeyName,
        String issuerKeyPassword,
        Crypto issuerCrypto,
        boolean useKeyInfo
    ) throws Exception {
        //
        // Create the signature
        //
        Signature signature = OpenSAMLUtil.buildSignature();
        signature.setCanonicalizationAlgorithm(SignatureConstants.ALGO_ID_C14N_EXCL_OMIT_COMMENTS);

        // prepare to sign the SAML token
        CryptoType cryptoType = new CryptoType(CryptoType.TYPE.ALIAS);
        cryptoType.setAlias(issuerKeyName);
        X509Certificate[] issuerCerts = issuerCrypto.getX509Certificates(cryptoType);
        if (issuerCerts == null) {
            throw new Exception(
                "No issuer certs were found to sign the SAML Assertion using issuer name: "
                + issuerKeyName);
        }

        // Pick the signature algorithm to match the key type (RSA default, DSA fallback)
        String sigAlgo = SignatureConstants.ALGO_ID_SIGNATURE_RSA_SHA1;
        String pubKeyAlgo = issuerCerts[0].getPublicKey().getAlgorithm();
        if ("DSA".equalsIgnoreCase(pubKeyAlgo)) {
            sigAlgo = SignatureConstants.ALGO_ID_SIGNATURE_DSA;
        }

        PrivateKey privateKey = issuerCrypto.getPrivateKey(issuerKeyName, issuerKeyPassword);

        signature.setSignatureAlgorithm(sigAlgo);

        BasicX509Credential signingCredential = new BasicX509Credential(issuerCerts[0], privateKey);
        signature.setSigningCredential(signingCredential);

        if (useKeyInfo) {
            X509KeyInfoGeneratorFactory kiFactory = new X509KeyInfoGeneratorFactory();
            kiFactory.setEmitEntityCertificate(true);
            try {
                KeyInfo keyInfo = kiFactory.newInstance().generate(signingCredential);
                signature.setKeyInfo(keyInfo);
            } catch (org.opensaml.security.SecurityException ex) {
                throw new Exception(
                    "Error generating KeyInfo from signing credential", ex);
            }
        }

        // add the signature to the assertion
        SignableSAMLObject signableObject = response;
        signableObject.setSignature(signature);
        // Force re-marshalling so the new Signature is included in the DOM
        signableObject.releaseDOM();
        signableObject.releaseChildrenDOM(true);
    }

    /**
     * Build a Success Response containing a single assertion produced by the
     * given callback handler, and round-trip it through DOM.
     *
     * @param subjectConfirmationData kept for signature compatibility (the
     *        callback handler is expected to carry the same bean already)
     * @param callbackHandler configured handler that supplies the assertion
     * @return the Response re-parsed from its DOM representation
     */
    private Response createResponse(
        SubjectConfirmationDataBean subjectConfirmationData,
        SAML2CallbackHandler callbackHandler
    ) throws Exception {
        Document doc = DOMUtils.createDocument();

        Status status =
            SAML2PResponseComponentBuilder.createStatus(
                SAMLProtocolResponseValidator.SAML2_STATUSCODE_SUCCESS, null
            );
        Response response =
            SAML2PResponseComponentBuilder.createSAMLResponse(
                "http://cxf.apache.org/saml", "http://cxf.apache.org/issuer", status
            );

        // Create an AuthenticationAssertion
        SAMLCallback samlCallback = new SAMLCallback();
        SAMLUtil.doSAMLCallback(callbackHandler, samlCallback);
        SamlAssertionWrapper assertion = new SamlAssertionWrapper(samlCallback);

        response.getAssertions().add(assertion.getSaml2());

        Element policyElement = OpenSAMLUtil.toDom(response, doc);
        doc.appendChild(policyElement);
        assertNotNull(policyElement);

        return (Response)OpenSAMLUtil.fromDom(policyElement);
    }
}
googleapis/google-cloud-java
35,865
java-chronicle/proto-google-cloud-chronicle-v1/src/main/java/com/google/cloud/chronicle/v1/ListReferenceListsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/chronicle/v1/reference_list.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.chronicle.v1; /** * * * <pre> * A request for a list of reference lists. * </pre> * * Protobuf type {@code google.cloud.chronicle.v1.ListReferenceListsRequest} */ public final class ListReferenceListsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.chronicle.v1.ListReferenceListsRequest) ListReferenceListsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListReferenceListsRequest.newBuilder() to construct. 
private ListReferenceListsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListReferenceListsRequest() { parent_ = ""; pageToken_ = ""; view_ = 0; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListReferenceListsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.chronicle.v1.ReferenceListProto .internal_static_google_cloud_chronicle_v1_ListReferenceListsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.chronicle.v1.ReferenceListProto .internal_static_google_cloud_chronicle_v1_ListReferenceListsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.chronicle.v1.ListReferenceListsRequest.class, com.google.cloud.chronicle.v1.ListReferenceListsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent, which owns this collection of reference lists. * Format: * `projects/{project}/locations/{location}/instances/{instance}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The parent, which owns this collection of reference lists. 
* Format: * `projects/{project}/locations/{location}/instances/{instance}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * The maximum number of reference lists to return. * The service may return fewer than this value. * If unspecified, at most 100 reference lists will be returned. * The maximum value is 1000; values above 1000 will be coerced to 1000. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token, received from a previous `ListReferenceLists` call. * Provide this to retrieve the subsequent page. * When paginating, all other parameters provided to `ListReferenceLists` must * match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * A page token, received from a previous `ListReferenceLists` call. * Provide this to retrieve the subsequent page. 
* When paginating, all other parameters provided to `ListReferenceLists` must * match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int VIEW_FIELD_NUMBER = 4; private int view_ = 0; /** * * * <pre> * How much of each ReferenceList to view. Defaults to * REFERENCE_LIST_VIEW_BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.ReferenceListView view = 4;</code> * * @return The enum numeric value on the wire for view. */ @java.lang.Override public int getViewValue() { return view_; } /** * * * <pre> * How much of each ReferenceList to view. Defaults to * REFERENCE_LIST_VIEW_BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.ReferenceListView view = 4;</code> * * @return The view. */ @java.lang.Override public com.google.cloud.chronicle.v1.ReferenceListView getView() { com.google.cloud.chronicle.v1.ReferenceListView result = com.google.cloud.chronicle.v1.ReferenceListView.forNumber(view_); return result == null ? 
com.google.cloud.chronicle.v1.ReferenceListView.UNRECOGNIZED : result; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (view_ != com.google.cloud.chronicle.v1.ReferenceListView.REFERENCE_LIST_VIEW_UNSPECIFIED .getNumber()) { output.writeEnum(4, view_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (view_ != com.google.cloud.chronicle.v1.ReferenceListView.REFERENCE_LIST_VIEW_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(4, view_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.chronicle.v1.ListReferenceListsRequest)) { return super.equals(obj); } 
com.google.cloud.chronicle.v1.ListReferenceListsRequest other = (com.google.cloud.chronicle.v1.ListReferenceListsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (view_ != other.view_) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + VIEW_FIELD_NUMBER; hash = (53 * hash) + view_; hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.chronicle.v1.ListReferenceListsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.chronicle.v1.ListReferenceListsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListReferenceListsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.chronicle.v1.ListReferenceListsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
public static com.google.cloud.chronicle.v1.ListReferenceListsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.chronicle.v1.ListReferenceListsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListReferenceListsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.chronicle.v1.ListReferenceListsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListReferenceListsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.chronicle.v1.ListReferenceListsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.chronicle.v1.ListReferenceListsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.chronicle.v1.ListReferenceListsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.chronicle.v1.ListReferenceListsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A request for a list of reference lists. * </pre> * * Protobuf type {@code google.cloud.chronicle.v1.ListReferenceListsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.chronicle.v1.ListReferenceListsRequest) com.google.cloud.chronicle.v1.ListReferenceListsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.chronicle.v1.ReferenceListProto .internal_static_google_cloud_chronicle_v1_ListReferenceListsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.chronicle.v1.ReferenceListProto .internal_static_google_cloud_chronicle_v1_ListReferenceListsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.chronicle.v1.ListReferenceListsRequest.class, com.google.cloud.chronicle.v1.ListReferenceListsRequest.Builder.class); } // Construct using com.google.cloud.chronicle.v1.ListReferenceListsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { 
super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; view_ = 0; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.chronicle.v1.ReferenceListProto .internal_static_google_cloud_chronicle_v1_ListReferenceListsRequest_descriptor; } @java.lang.Override public com.google.cloud.chronicle.v1.ListReferenceListsRequest getDefaultInstanceForType() { return com.google.cloud.chronicle.v1.ListReferenceListsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.chronicle.v1.ListReferenceListsRequest build() { com.google.cloud.chronicle.v1.ListReferenceListsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.chronicle.v1.ListReferenceListsRequest buildPartial() { com.google.cloud.chronicle.v1.ListReferenceListsRequest result = new com.google.cloud.chronicle.v1.ListReferenceListsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.chronicle.v1.ListReferenceListsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.view_ = view_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override 
public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.chronicle.v1.ListReferenceListsRequest) { return mergeFrom((com.google.cloud.chronicle.v1.ListReferenceListsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.chronicle.v1.ListReferenceListsRequest other) { if (other == com.google.cloud.chronicle.v1.ListReferenceListsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (other.view_ != 0) { setViewValue(other.getViewValue()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); 
bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 32: { view_ = input.readEnum(); bitField0_ |= 0x00000008; break; } // case 32 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent, which owns this collection of reference lists. * Format: * `projects/{project}/locations/{location}/instances/{instance}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The parent, which owns this collection of reference lists. * Format: * `projects/{project}/locations/{location}/instances/{instance}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent, which owns this collection of reference lists. 
* Format: * `projects/{project}/locations/{location}/instances/{instance}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The parent, which owns this collection of reference lists. * Format: * `projects/{project}/locations/{location}/instances/{instance}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The parent, which owns this collection of reference lists. * Format: * `projects/{project}/locations/{location}/instances/{instance}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * The maximum number of reference lists to return. * The service may return fewer than this value. * If unspecified, at most 100 reference lists will be returned. * The maximum value is 1000; values above 1000 will be coerced to 1000. * </pre> * * <code>int32 page_size = 2;</code> * * @return The pageSize. 
*/ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * The maximum number of reference lists to return. * The service may return fewer than this value. * If unspecified, at most 100 reference lists will be returned. * The maximum value is 1000; values above 1000 will be coerced to 1000. * </pre> * * <code>int32 page_size = 2;</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The maximum number of reference lists to return. * The service may return fewer than this value. * If unspecified, at most 100 reference lists will be returned. * The maximum value is 1000; values above 1000 will be coerced to 1000. * </pre> * * <code>int32 page_size = 2;</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * A page token, received from a previous `ListReferenceLists` call. * Provide this to retrieve the subsequent page. * When paginating, all other parameters provided to `ListReferenceLists` must * match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A page token, received from a previous `ListReferenceLists` call. * Provide this to retrieve the subsequent page. * When paginating, all other parameters provided to `ListReferenceLists` must * match the call that provided the page token. 
* </pre> * * <code>string page_token = 3;</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A page token, received from a previous `ListReferenceLists` call. * Provide this to retrieve the subsequent page. * When paginating, all other parameters provided to `ListReferenceLists` must * match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * A page token, received from a previous `ListReferenceLists` call. * Provide this to retrieve the subsequent page. * When paginating, all other parameters provided to `ListReferenceLists` must * match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * A page token, received from a previous `ListReferenceLists` call. * Provide this to retrieve the subsequent page. * When paginating, all other parameters provided to `ListReferenceLists` must * match the call that provided the page token. * </pre> * * <code>string page_token = 3;</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private int view_ = 0; /** * * * <pre> * How much of each ReferenceList to view. Defaults to * REFERENCE_LIST_VIEW_BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.ReferenceListView view = 4;</code> * * @return The enum numeric value on the wire for view. */ @java.lang.Override public int getViewValue() { return view_; } /** * * * <pre> * How much of each ReferenceList to view. Defaults to * REFERENCE_LIST_VIEW_BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.ReferenceListView view = 4;</code> * * @param value The enum numeric value on the wire for view to set. * @return This builder for chaining. */ public Builder setViewValue(int value) { view_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * How much of each ReferenceList to view. Defaults to * REFERENCE_LIST_VIEW_BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.ReferenceListView view = 4;</code> * * @return The view. */ @java.lang.Override public com.google.cloud.chronicle.v1.ReferenceListView getView() { com.google.cloud.chronicle.v1.ReferenceListView result = com.google.cloud.chronicle.v1.ReferenceListView.forNumber(view_); return result == null ? com.google.cloud.chronicle.v1.ReferenceListView.UNRECOGNIZED : result; } /** * * * <pre> * How much of each ReferenceList to view. Defaults to * REFERENCE_LIST_VIEW_BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.ReferenceListView view = 4;</code> * * @param value The view to set. * @return This builder for chaining. 
*/ public Builder setView(com.google.cloud.chronicle.v1.ReferenceListView value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000008; view_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * How much of each ReferenceList to view. Defaults to * REFERENCE_LIST_VIEW_BASIC. * </pre> * * <code>.google.cloud.chronicle.v1.ReferenceListView view = 4;</code> * * @return This builder for chaining. */ public Builder clearView() { bitField0_ = (bitField0_ & ~0x00000008); view_ = 0; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.chronicle.v1.ListReferenceListsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.chronicle.v1.ListReferenceListsRequest) private static final com.google.cloud.chronicle.v1.ListReferenceListsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.chronicle.v1.ListReferenceListsRequest(); } public static com.google.cloud.chronicle.v1.ListReferenceListsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListReferenceListsRequest> PARSER = new com.google.protobuf.AbstractParser<ListReferenceListsRequest>() { @java.lang.Override public ListReferenceListsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch 
(com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListReferenceListsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListReferenceListsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.chronicle.v1.ListReferenceListsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,905
java-shopping-merchant-conversions/proto-google-shopping-merchant-conversions-v1/src/main/java/com/google/shopping/merchant/conversions/v1/UpdateConversionSourceRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/merchant/conversions/v1/conversionsources.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.merchant.conversions.v1; /** * * * <pre> * Request message for the UpdateConversionSource method. * </pre> * * Protobuf type {@code google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest} */ public final class UpdateConversionSourceRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest) UpdateConversionSourceRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateConversionSourceRequest.newBuilder() to construct. 
private UpdateConversionSourceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateConversionSourceRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateConversionSourceRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.conversions.v1.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1_UpdateConversionSourceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.conversions.v1.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1_UpdateConversionSourceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest.class, com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest.Builder .class); } private int bitField0_; public static final int CONVERSION_SOURCE_FIELD_NUMBER = 1; private com.google.shopping.merchant.conversions.v1.ConversionSource conversionSource_; /** * * * <pre> * Required. The new version of the conversion source data. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.ConversionSource conversion_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the conversionSource field is set. */ @java.lang.Override public boolean hasConversionSource() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The new version of the conversion source data. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.ConversionSource conversion_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The conversionSource. 
*/ @java.lang.Override public com.google.shopping.merchant.conversions.v1.ConversionSource getConversionSource() { return conversionSource_ == null ? com.google.shopping.merchant.conversions.v1.ConversionSource.getDefaultInstance() : conversionSource_; } /** * * * <pre> * Required. The new version of the conversion source data. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.ConversionSource conversion_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.shopping.merchant.conversions.v1.ConversionSourceOrBuilder getConversionSourceOrBuilder() { return conversionSource_ == null ? com.google.shopping.merchant.conversions.v1.ConversionSource.getDefaultInstance() : conversionSource_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getConversionSource()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getConversionSource()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest)) { return super.equals(obj); } com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest other = (com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest) obj; if (hasConversionSource() != other.hasConversionSource()) return false; if (hasConversionSource()) { if (!getConversionSource().equals(other.getConversionSource())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 
0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasConversionSource()) { hash = (37 * hash) + CONVERSION_SOURCE_FIELD_NUMBER; hash = (53 * hash) + getConversionSource().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public 
static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the UpdateConversionSource method. * </pre> * * Protobuf type {@code google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest) com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.merchant.conversions.v1.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1_UpdateConversionSourceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.merchant.conversions.v1.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1_UpdateConversionSourceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest.class, com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest.Builder .class); } // Construct using // com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getConversionSourceFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; conversionSource_ = null; if (conversionSourceBuilder_ != null) { conversionSourceBuilder_.dispose(); conversionSourceBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.merchant.conversions.v1.ConversionSourcesProto .internal_static_google_shopping_merchant_conversions_v1_UpdateConversionSourceRequest_descriptor; } @java.lang.Override public com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest getDefaultInstanceForType() { return com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest .getDefaultInstance(); } @java.lang.Override public com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest build() { com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest buildPartial() { com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest result = new com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.conversionSource_ = 
conversionSourceBuilder_ == null ? conversionSource_ : conversionSourceBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest) { return mergeFrom( (com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest other) { if (other == com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest .getDefaultInstance()) return this; if (other.hasConversionSource()) { mergeConversionSource(other.getConversionSource()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); 
onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getConversionSourceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.shopping.merchant.conversions.v1.ConversionSource conversionSource_; private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.conversions.v1.ConversionSource, com.google.shopping.merchant.conversions.v1.ConversionSource.Builder, com.google.shopping.merchant.conversions.v1.ConversionSourceOrBuilder> conversionSourceBuilder_; /** * * * <pre> * Required. The new version of the conversion source data. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.ConversionSource conversion_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the conversionSource field is set. */ public boolean hasConversionSource() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The new version of the conversion source data. 
* </pre> * * <code> * .google.shopping.merchant.conversions.v1.ConversionSource conversion_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The conversionSource. */ public com.google.shopping.merchant.conversions.v1.ConversionSource getConversionSource() { if (conversionSourceBuilder_ == null) { return conversionSource_ == null ? com.google.shopping.merchant.conversions.v1.ConversionSource.getDefaultInstance() : conversionSource_; } else { return conversionSourceBuilder_.getMessage(); } } /** * * * <pre> * Required. The new version of the conversion source data. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.ConversionSource conversion_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setConversionSource( com.google.shopping.merchant.conversions.v1.ConversionSource value) { if (conversionSourceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } conversionSource_ = value; } else { conversionSourceBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The new version of the conversion source data. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.ConversionSource conversion_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setConversionSource( com.google.shopping.merchant.conversions.v1.ConversionSource.Builder builderForValue) { if (conversionSourceBuilder_ == null) { conversionSource_ = builderForValue.build(); } else { conversionSourceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The new version of the conversion source data. 
* </pre> * * <code> * .google.shopping.merchant.conversions.v1.ConversionSource conversion_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeConversionSource( com.google.shopping.merchant.conversions.v1.ConversionSource value) { if (conversionSourceBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && conversionSource_ != null && conversionSource_ != com.google.shopping.merchant.conversions.v1.ConversionSource .getDefaultInstance()) { getConversionSourceBuilder().mergeFrom(value); } else { conversionSource_ = value; } } else { conversionSourceBuilder_.mergeFrom(value); } if (conversionSource_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The new version of the conversion source data. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.ConversionSource conversion_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearConversionSource() { bitField0_ = (bitField0_ & ~0x00000001); conversionSource_ = null; if (conversionSourceBuilder_ != null) { conversionSourceBuilder_.dispose(); conversionSourceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The new version of the conversion source data. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.ConversionSource conversion_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.shopping.merchant.conversions.v1.ConversionSource.Builder getConversionSourceBuilder() { bitField0_ |= 0x00000001; onChanged(); return getConversionSourceFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The new version of the conversion source data. 
* </pre> * * <code> * .google.shopping.merchant.conversions.v1.ConversionSource conversion_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.shopping.merchant.conversions.v1.ConversionSourceOrBuilder getConversionSourceOrBuilder() { if (conversionSourceBuilder_ != null) { return conversionSourceBuilder_.getMessageOrBuilder(); } else { return conversionSource_ == null ? com.google.shopping.merchant.conversions.v1.ConversionSource.getDefaultInstance() : conversionSource_; } } /** * * * <pre> * Required. The new version of the conversion source data. * </pre> * * <code> * .google.shopping.merchant.conversions.v1.ConversionSource conversion_source = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.conversions.v1.ConversionSource, com.google.shopping.merchant.conversions.v1.ConversionSource.Builder, com.google.shopping.merchant.conversions.v1.ConversionSourceOrBuilder> getConversionSourceFieldBuilder() { if (conversionSourceBuilder_ == null) { conversionSourceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.merchant.conversions.v1.ConversionSource, com.google.shopping.merchant.conversions.v1.ConversionSource.Builder, com.google.shopping.merchant.conversions.v1.ConversionSourceOrBuilder>( getConversionSource(), getParentForChildren(), isClean()); conversionSource_ = null; } return conversionSourceBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. List of fields being updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. List of fields being updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. List of fields being updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest) } // @@protoc_insertion_point(class_scope:google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest) private static final com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest(); } public static com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateConversionSourceRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateConversionSourceRequest>() { @java.lang.Override public UpdateConversionSourceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { 
builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateConversionSourceRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateConversionSourceRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.merchant.conversions.v1.UpdateConversionSourceRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jdk8
36,130
corba/src/share/classes/com/sun/corba/se/impl/dynamicany/DynAnyConstructedImpl.java
/* * Copyright (c) 2000, 2003, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. 
*/ package com.sun.corba.se.impl.dynamicany; import org.omg.CORBA.Any; import org.omg.CORBA.TypeCode; import org.omg.CORBA.portable.OutputStream; import org.omg.DynamicAny.*; import org.omg.DynamicAny.DynAnyPackage.TypeMismatch; import org.omg.DynamicAny.DynAnyPackage.InvalidValue; import org.omg.DynamicAny.DynAnyFactoryPackage.InconsistentTypeCode; import com.sun.corba.se.impl.corba.TypeCodeImpl; // needed for recursive type codes import com.sun.corba.se.spi.orb.ORB ; import com.sun.corba.se.spi.logging.CORBALogDomains ; import com.sun.corba.se.impl.logging.ORBUtilSystemException ; abstract class DynAnyConstructedImpl extends DynAnyImpl { protected static final byte REPRESENTATION_NONE = 0; protected static final byte REPRESENTATION_TYPECODE = 1; protected static final byte REPRESENTATION_ANY = 2; protected static final byte REPRESENTATION_COMPONENTS = 4; protected static final byte RECURSIVE_UNDEF = -1; protected static final byte RECURSIVE_NO = 0; protected static final byte RECURSIVE_YES = 1; protected static final DynAny[] emptyComponents = new DynAny[0]; // // Instance variables // // Constructed DynAnys maintain an ordered collection of component DynAnys. DynAny[] components = emptyComponents; byte representations = REPRESENTATION_NONE; byte isRecursive = RECURSIVE_UNDEF; // // Constructors // private DynAnyConstructedImpl() { this(null, (Any)null, false); } protected DynAnyConstructedImpl(ORB orb, Any any, boolean copyValue) { super(orb, any, copyValue); //System.out.println(this + " constructed with any " + any); if (this.any != null) { representations = REPRESENTATION_ANY; } // set the current position to 0 if any has components, otherwise to -1. index = 0; } protected DynAnyConstructedImpl(ORB orb, TypeCode typeCode) { // assertion: typeCode has been checked to be valid for this particular subclass. // note: We don't copy TypeCodes since they are considered immutable. 
super(orb, typeCode); if (typeCode != null) { representations = REPRESENTATION_TYPECODE; } // set the current position to 0 if any has components, otherwise to -1. index = NO_INDEX; // _REVISIT_ Would need REPRESENTATION_TYPECODE for lazy initialization //if ( ! isRecursive()) { // initializeComponentsFromTypeCode(); //} } protected boolean isRecursive() { if (isRecursive == RECURSIVE_UNDEF) { TypeCode typeCode = any.type(); if (typeCode instanceof TypeCodeImpl) { if (((TypeCodeImpl)typeCode).is_recursive()) isRecursive = RECURSIVE_YES; else isRecursive = RECURSIVE_NO; } else { // No way to find out unless the TypeCode spec changes. isRecursive = RECURSIVE_NO; } } return (isRecursive == RECURSIVE_YES); } // // DynAny traversal methods // public org.omg.DynamicAny.DynAny current_component() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) { return null; } return (checkInitComponents() ? components[index] : null); } public int component_count() { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } return (checkInitComponents() ? components.length : 0); } public boolean next() { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (checkInitComponents() == false) { return false; } index++; if (index >= 0 && index < components.length) { return true; } else { index = NO_INDEX; return false; } } public boolean seek(int newIndex) { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (newIndex < 0) { this.index = NO_INDEX; return false; } if (checkInitComponents() == false) { return false; } if (newIndex < components.length) { index = newIndex; return true; } return false; } public void rewind() { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } this.seek(0); } // // Utility methods // protected void clearData() { super.clearData(); // _REVISIT_ What about status? 
components = emptyComponents; index = NO_INDEX; representations = REPRESENTATION_NONE; } protected void writeAny(OutputStream out) { // If all we got is TypeCode representation (no value) // then we don't want to force creating a default value //System.out.println(this + " checkInitAny before writeAny"); checkInitAny(); super.writeAny(out); } // Makes sure that the components representation is initialized protected boolean checkInitComponents() { if ((representations & REPRESENTATION_COMPONENTS) == 0) { if ((representations & REPRESENTATION_ANY) != 0) { if (initializeComponentsFromAny()) { representations |= REPRESENTATION_COMPONENTS; } else { return false; } } else if ((representations & REPRESENTATION_TYPECODE) != 0) { if (initializeComponentsFromTypeCode()) { representations |= REPRESENTATION_COMPONENTS; } else { return false; } } } return true; } // Makes sure that the Any representation is initialized protected void checkInitAny() { if ((representations & REPRESENTATION_ANY) == 0) { //System.out.println(this + " checkInitAny: reps does not have REPRESENTATION_ANY"); if ((representations & REPRESENTATION_COMPONENTS) != 0) { //System.out.println(this + " checkInitAny: reps has REPRESENTATION_COMPONENTS"); if (initializeAnyFromComponents()) { representations |= REPRESENTATION_ANY; } } else if ((representations & REPRESENTATION_TYPECODE) != 0) { //System.out.println(this + " checkInitAny: reps has REPRESENTATION_TYPECODE"); if (representations == REPRESENTATION_TYPECODE && isRecursive()) return; if (initializeComponentsFromTypeCode()) { representations |= REPRESENTATION_COMPONENTS; } if (initializeAnyFromComponents()) { representations |= REPRESENTATION_ANY; } } } else { //System.out.println(this + " checkInitAny: reps != REPRESENTATION_ANY"); } return; } protected abstract boolean initializeComponentsFromAny(); protected abstract boolean initializeComponentsFromTypeCode(); // Collapses the whole DynAny hierarchys values into one single streamed Any protected 
boolean initializeAnyFromComponents() { //System.out.println(this + " initializeAnyFromComponents"); OutputStream out = any.create_output_stream(); for (int i=0; i<components.length; i++) { if (components[i] instanceof DynAnyImpl) { ((DynAnyImpl)components[i]).writeAny(out); } else { // Not our implementation. Nothing we can do to prevent copying. components[i].to_any().write_value(out); } } any.read_value(out.create_input_stream(), any.type()); return true; } // // DynAny interface methods // public void assign (org.omg.DynamicAny.DynAny dyn_any) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } clearData(); super.assign(dyn_any); representations = REPRESENTATION_ANY; index = 0; } public void from_any (org.omg.CORBA.Any value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } clearData(); super.from_any(value); representations = REPRESENTATION_ANY; index = 0; } // Spec: Returns a copy of the internal Any public org.omg.CORBA.Any to_any() { //System.out.println(this + " to_any "); if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } checkInitAny(); // Anys value may still be uninitialized if DynAny was initialized by TypeCode only return DynAnyUtil.copy(any, orb); } public boolean equal (org.omg.DynamicAny.DynAny dyn_any) { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (dyn_any == this) { return true; } if ( ! any.type().equal(dyn_any.type())) { return false; } // This changes the current position of dyn_any. // Make sure that our position isn't changed. 
if (checkInitComponents() == false) { return false; } DynAny currentComponent = null; try { // Remember the current position to restore it later currentComponent = dyn_any.current_component(); for (int i=0; i<components.length; i++) { if (dyn_any.seek(i) == false) return false; //System.out.println(this + " comparing component " + i + "=" + components[i] + // " of type " + components[i].type().kind().value()); if ( ! components[i].equal(dyn_any.current_component())) { //System.out.println("Not equal component " + i); return false; } } } catch (TypeMismatch tm) { // impossible, we checked the type codes already } finally { // Restore the current position of the other DynAny DynAnyUtil.set_current_component(dyn_any, currentComponent); } return true; } public void destroy() { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (status == STATUS_DESTROYABLE) { status = STATUS_DESTROYED; for (int i=0; i<components.length; i++) { if (components[i] instanceof DynAnyImpl) { ((DynAnyImpl)components[i]).setStatus(STATUS_DESTROYABLE); } components[i].destroy(); } } } public org.omg.DynamicAny.DynAny copy() { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } checkInitAny(); try { return DynAnyUtil.createMostDerivedDynAny(any, orb, true); } catch (InconsistentTypeCode ictc) { return null; // impossible } } // getter / setter methods public void insert_boolean(boolean value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_boolean(value); } public void insert_octet(byte value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, 
org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_octet(value); } public void insert_char(char value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_char(value); } public void insert_short(short value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_short(value); } public void insert_ushort(short value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_ushort(value); } public void insert_long(int value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, 
org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_long(value); } public void insert_ulong(int value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_ulong(value); } public void insert_float(float value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_float(value); } public void insert_double(double value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_double(value); } public void insert_string(String value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, 
org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_string(value); } public void insert_reference(org.omg.CORBA.Object value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_reference(value); } public void insert_typecode(org.omg.CORBA.TypeCode value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_typecode(value); } public void insert_longlong(long value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_longlong(value); } public void insert_ulonglong(long value) throws 
org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_ulonglong(value); } public void insert_wchar(char value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_wchar(value); } public void insert_wstring(String value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_wstring(value); } public void insert_any(org.omg.CORBA.Any value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_any(value); } public void insert_dyn_any (org.omg.DynamicAny.DynAny value) 
throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_dyn_any(value); } public void insert_val(java.io.Serializable value) throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); currentComponent.insert_val(value); } public java.io.Serializable get_val() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_val(); } public boolean get_boolean() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_boolean(); } public byte get_octet() throws 
org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_octet(); } public char get_char() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_char(); } public short get_short() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_short(); } public short get_ushort() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_ushort(); } public int get_long() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, 
org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_long(); } public int get_ulong() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_ulong(); } public float get_float() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_float(); } public double get_double() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_double(); } public String get_string() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { 
throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_string(); } public org.omg.CORBA.Object get_reference() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_reference(); } public org.omg.CORBA.TypeCode get_typecode() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_typecode(); } public long get_longlong() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_longlong(); } public long get_ulonglong() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() 
; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_ulonglong(); } public char get_wchar() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_wchar(); } public String get_wstring() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_wstring(); } public org.omg.CORBA.Any get_any() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_any(); } public org.omg.DynamicAny.DynAny get_dyn_any() throws org.omg.DynamicAny.DynAnyPackage.TypeMismatch, org.omg.DynamicAny.DynAnyPackage.InvalidValue { if (status == STATUS_DESTROYED) { throw wrapper.dynAnyDestroyed() ; } if (index == NO_INDEX) throw new 
org.omg.DynamicAny.DynAnyPackage.InvalidValue(); DynAny currentComponent = current_component(); if (DynAnyUtil.isConstructedDynAny(currentComponent)) throw new org.omg.DynamicAny.DynAnyPackage.TypeMismatch(); return currentComponent.get_dyn_any(); } }
googleapis/google-cloud-java
35,851
java-dataflow/proto-google-cloud-dataflow-v1beta3/src/main/java/com/google/dataflow/v1beta3/Step.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/dataflow/v1beta3/jobs.proto // Protobuf Java Version: 3.25.8 package com.google.dataflow.v1beta3; /** * * * <pre> * Defines a particular step within a Cloud Dataflow job. * * A job consists of multiple steps, each of which performs some * specific operation as part of the overall job. Data is typically * passed from one step to another as part of the job. * * **Note:** The properties of this object are not stable and might change. * * Here's an example of a sequence of steps which together implement a * Map-Reduce job: * * * Read a collection of data from some source, parsing the * collection's elements. * * * Validate the elements. * * * Apply a user-defined function to map each element to some value * and extract an element-specific key value. * * * Group elements with the same key into a single element with * that key, transforming a multiply-keyed collection into a * uniquely-keyed collection. * * * Write the elements out to some data sink. * * Note that the Cloud Dataflow service may be used to run many different * types of jobs, not just Map-Reduce. 
* </pre> * * Protobuf type {@code google.dataflow.v1beta3.Step} */ public final class Step extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.dataflow.v1beta3.Step) StepOrBuilder { private static final long serialVersionUID = 0L; // Use Step.newBuilder() to construct. private Step(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private Step() { kind_ = ""; name_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new Step(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.dataflow.v1beta3.JobsProto .internal_static_google_dataflow_v1beta3_Step_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.dataflow.v1beta3.JobsProto .internal_static_google_dataflow_v1beta3_Step_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.dataflow.v1beta3.Step.class, com.google.dataflow.v1beta3.Step.Builder.class); } private int bitField0_; public static final int KIND_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object kind_ = ""; /** * * * <pre> * The kind of step in the Cloud Dataflow job. * </pre> * * <code>string kind = 1;</code> * * @return The kind. */ @java.lang.Override public java.lang.String getKind() { java.lang.Object ref = kind_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); kind_ = s; return s; } } /** * * * <pre> * The kind of step in the Cloud Dataflow job. * </pre> * * <code>string kind = 1;</code> * * @return The bytes for kind. 
*/ @java.lang.Override public com.google.protobuf.ByteString getKindBytes() { java.lang.Object ref = kind_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); kind_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int NAME_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * The name that identifies the step. This must be unique for each * step with respect to all other steps in the Cloud Dataflow job. * </pre> * * <code>string name = 2;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * The name that identifies the step. This must be unique for each * step with respect to all other steps in the Cloud Dataflow job. * </pre> * * <code>string name = 2;</code> * * @return The bytes for name. */ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PROPERTIES_FIELD_NUMBER = 3; private com.google.protobuf.Struct properties_; /** * * * <pre> * Named properties associated with the step. Each kind of * predefined step has its own required set of properties. * Must be provided on Create. Only retrieved with JOB_VIEW_ALL. * </pre> * * <code>.google.protobuf.Struct properties = 3;</code> * * @return Whether the properties field is set. 
*/ @java.lang.Override public boolean hasProperties() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Named properties associated with the step. Each kind of * predefined step has its own required set of properties. * Must be provided on Create. Only retrieved with JOB_VIEW_ALL. * </pre> * * <code>.google.protobuf.Struct properties = 3;</code> * * @return The properties. */ @java.lang.Override public com.google.protobuf.Struct getProperties() { return properties_ == null ? com.google.protobuf.Struct.getDefaultInstance() : properties_; } /** * * * <pre> * Named properties associated with the step. Each kind of * predefined step has its own required set of properties. * Must be provided on Create. Only retrieved with JOB_VIEW_ALL. * </pre> * * <code>.google.protobuf.Struct properties = 3;</code> */ @java.lang.Override public com.google.protobuf.StructOrBuilder getPropertiesOrBuilder() { return properties_ == null ? com.google.protobuf.Struct.getDefaultInstance() : properties_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kind_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, kind_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, name_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getProperties()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(kind_)) { size += 
com.google.protobuf.GeneratedMessageV3.computeStringSize(1, kind_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, name_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getProperties()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.dataflow.v1beta3.Step)) { return super.equals(obj); } com.google.dataflow.v1beta3.Step other = (com.google.dataflow.v1beta3.Step) obj; if (!getKind().equals(other.getKind())) return false; if (!getName().equals(other.getName())) return false; if (hasProperties() != other.hasProperties()) return false; if (hasProperties()) { if (!getProperties().equals(other.getProperties())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + KIND_FIELD_NUMBER; hash = (53 * hash) + getKind().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); if (hasProperties()) { hash = (37 * hash) + PROPERTIES_FIELD_NUMBER; hash = (53 * hash) + getProperties().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.dataflow.v1beta3.Step parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.dataflow.v1beta3.Step parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.dataflow.v1beta3.Step parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.dataflow.v1beta3.Step parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.dataflow.v1beta3.Step parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.dataflow.v1beta3.Step parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.dataflow.v1beta3.Step parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.dataflow.v1beta3.Step parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.dataflow.v1beta3.Step parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.dataflow.v1beta3.Step parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.dataflow.v1beta3.Step parseFrom( com.google.protobuf.CodedInputStream input) throws 
java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.dataflow.v1beta3.Step parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.dataflow.v1beta3.Step prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Defines a particular step within a Cloud Dataflow job. * * A job consists of multiple steps, each of which performs some * specific operation as part of the overall job. Data is typically * passed from one step to another as part of the job. * * **Note:** The properties of this object are not stable and might change. * * Here's an example of a sequence of steps which together implement a * Map-Reduce job: * * * Read a collection of data from some source, parsing the * collection's elements. * * * Validate the elements. * * * Apply a user-defined function to map each element to some value * and extract an element-specific key value. * * * Group elements with the same key into a single element with * that key, transforming a multiply-keyed collection into a * uniquely-keyed collection. * * * Write the elements out to some data sink. * * Note that the Cloud Dataflow service may be used to run many different * types of jobs, not just Map-Reduce. 
* </pre> * * Protobuf type {@code google.dataflow.v1beta3.Step} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.dataflow.v1beta3.Step) com.google.dataflow.v1beta3.StepOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.dataflow.v1beta3.JobsProto .internal_static_google_dataflow_v1beta3_Step_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.dataflow.v1beta3.JobsProto .internal_static_google_dataflow_v1beta3_Step_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.dataflow.v1beta3.Step.class, com.google.dataflow.v1beta3.Step.Builder.class); } // Construct using com.google.dataflow.v1beta3.Step.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getPropertiesFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; kind_ = ""; name_ = ""; properties_ = null; if (propertiesBuilder_ != null) { propertiesBuilder_.dispose(); propertiesBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.dataflow.v1beta3.JobsProto .internal_static_google_dataflow_v1beta3_Step_descriptor; } @java.lang.Override public com.google.dataflow.v1beta3.Step getDefaultInstanceForType() { return com.google.dataflow.v1beta3.Step.getDefaultInstance(); } @java.lang.Override public com.google.dataflow.v1beta3.Step build() { com.google.dataflow.v1beta3.Step result = buildPartial(); if (!result.isInitialized()) { throw 
newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.dataflow.v1beta3.Step buildPartial() { com.google.dataflow.v1beta3.Step result = new com.google.dataflow.v1beta3.Step(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.dataflow.v1beta3.Step result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.kind_ = kind_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.name_ = name_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.properties_ = propertiesBuilder_ == null ? properties_ : propertiesBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.dataflow.v1beta3.Step) { return mergeFrom((com.google.dataflow.v1beta3.Step) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.dataflow.v1beta3.Step other) { if (other == 
com.google.dataflow.v1beta3.Step.getDefaultInstance()) return this; if (!other.getKind().isEmpty()) { kind_ = other.kind_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasProperties()) { mergeProperties(other.getProperties()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { kind_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getPropertiesFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object kind_ = ""; /** * * * <pre> * The kind of step in the Cloud Dataflow job. * </pre> * * <code>string kind = 1;</code> * * @return The kind. 
*/ public java.lang.String getKind() { java.lang.Object ref = kind_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); kind_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The kind of step in the Cloud Dataflow job. * </pre> * * <code>string kind = 1;</code> * * @return The bytes for kind. */ public com.google.protobuf.ByteString getKindBytes() { java.lang.Object ref = kind_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); kind_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The kind of step in the Cloud Dataflow job. * </pre> * * <code>string kind = 1;</code> * * @param value The kind to set. * @return This builder for chaining. */ public Builder setKind(java.lang.String value) { if (value == null) { throw new NullPointerException(); } kind_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The kind of step in the Cloud Dataflow job. * </pre> * * <code>string kind = 1;</code> * * @return This builder for chaining. */ public Builder clearKind() { kind_ = getDefaultInstance().getKind(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The kind of step in the Cloud Dataflow job. * </pre> * * <code>string kind = 1;</code> * * @param value The bytes for kind to set. * @return This builder for chaining. */ public Builder setKindBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); kind_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object name_ = ""; /** * * * <pre> * The name that identifies the step. This must be unique for each * step with respect to all other steps in the Cloud Dataflow job. 
* </pre> * * <code>string name = 2;</code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name that identifies the step. This must be unique for each * step with respect to all other steps in the Cloud Dataflow job. * </pre> * * <code>string name = 2;</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name that identifies the step. This must be unique for each * step with respect to all other steps in the Cloud Dataflow job. * </pre> * * <code>string name = 2;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The name that identifies the step. This must be unique for each * step with respect to all other steps in the Cloud Dataflow job. * </pre> * * <code>string name = 2;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The name that identifies the step. This must be unique for each * step with respect to all other steps in the Cloud Dataflow job. * </pre> * * <code>string name = 2;</code> * * @param value The bytes for name to set. * @return This builder for chaining. 
*/ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.protobuf.Struct properties_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Struct, com.google.protobuf.Struct.Builder, com.google.protobuf.StructOrBuilder> propertiesBuilder_; /** * * * <pre> * Named properties associated with the step. Each kind of * predefined step has its own required set of properties. * Must be provided on Create. Only retrieved with JOB_VIEW_ALL. * </pre> * * <code>.google.protobuf.Struct properties = 3;</code> * * @return Whether the properties field is set. */ public boolean hasProperties() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Named properties associated with the step. Each kind of * predefined step has its own required set of properties. * Must be provided on Create. Only retrieved with JOB_VIEW_ALL. * </pre> * * <code>.google.protobuf.Struct properties = 3;</code> * * @return The properties. */ public com.google.protobuf.Struct getProperties() { if (propertiesBuilder_ == null) { return properties_ == null ? com.google.protobuf.Struct.getDefaultInstance() : properties_; } else { return propertiesBuilder_.getMessage(); } } /** * * * <pre> * Named properties associated with the step. Each kind of * predefined step has its own required set of properties. * Must be provided on Create. Only retrieved with JOB_VIEW_ALL. * </pre> * * <code>.google.protobuf.Struct properties = 3;</code> */ public Builder setProperties(com.google.protobuf.Struct value) { if (propertiesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } properties_ = value; } else { propertiesBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Named properties associated with the step. 
Each kind of * predefined step has its own required set of properties. * Must be provided on Create. Only retrieved with JOB_VIEW_ALL. * </pre> * * <code>.google.protobuf.Struct properties = 3;</code> */ public Builder setProperties(com.google.protobuf.Struct.Builder builderForValue) { if (propertiesBuilder_ == null) { properties_ = builderForValue.build(); } else { propertiesBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Named properties associated with the step. Each kind of * predefined step has its own required set of properties. * Must be provided on Create. Only retrieved with JOB_VIEW_ALL. * </pre> * * <code>.google.protobuf.Struct properties = 3;</code> */ public Builder mergeProperties(com.google.protobuf.Struct value) { if (propertiesBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && properties_ != null && properties_ != com.google.protobuf.Struct.getDefaultInstance()) { getPropertiesBuilder().mergeFrom(value); } else { properties_ = value; } } else { propertiesBuilder_.mergeFrom(value); } if (properties_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Named properties associated with the step. Each kind of * predefined step has its own required set of properties. * Must be provided on Create. Only retrieved with JOB_VIEW_ALL. * </pre> * * <code>.google.protobuf.Struct properties = 3;</code> */ public Builder clearProperties() { bitField0_ = (bitField0_ & ~0x00000004); properties_ = null; if (propertiesBuilder_ != null) { propertiesBuilder_.dispose(); propertiesBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Named properties associated with the step. Each kind of * predefined step has its own required set of properties. * Must be provided on Create. Only retrieved with JOB_VIEW_ALL. 
* </pre> * * <code>.google.protobuf.Struct properties = 3;</code> */ public com.google.protobuf.Struct.Builder getPropertiesBuilder() { bitField0_ |= 0x00000004; onChanged(); return getPropertiesFieldBuilder().getBuilder(); } /** * * * <pre> * Named properties associated with the step. Each kind of * predefined step has its own required set of properties. * Must be provided on Create. Only retrieved with JOB_VIEW_ALL. * </pre> * * <code>.google.protobuf.Struct properties = 3;</code> */ public com.google.protobuf.StructOrBuilder getPropertiesOrBuilder() { if (propertiesBuilder_ != null) { return propertiesBuilder_.getMessageOrBuilder(); } else { return properties_ == null ? com.google.protobuf.Struct.getDefaultInstance() : properties_; } } /** * * * <pre> * Named properties associated with the step. Each kind of * predefined step has its own required set of properties. * Must be provided on Create. Only retrieved with JOB_VIEW_ALL. * </pre> * * <code>.google.protobuf.Struct properties = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Struct, com.google.protobuf.Struct.Builder, com.google.protobuf.StructOrBuilder> getPropertiesFieldBuilder() { if (propertiesBuilder_ == null) { propertiesBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Struct, com.google.protobuf.Struct.Builder, com.google.protobuf.StructOrBuilder>( getProperties(), getParentForChildren(), isClean()); properties_ = null; } return propertiesBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.dataflow.v1beta3.Step) } // @@protoc_insertion_point(class_scope:google.dataflow.v1beta3.Step) private static 
final com.google.dataflow.v1beta3.Step DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.dataflow.v1beta3.Step(); } public static com.google.dataflow.v1beta3.Step getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<Step> PARSER = new com.google.protobuf.AbstractParser<Step>() { @java.lang.Override public Step parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<Step> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<Step> getParserForType() { return PARSER; } @java.lang.Override public com.google.dataflow.v1beta3.Step getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/commons-numbers
36,329
commons-numbers-complex/src/test/java/org/apache/commons/numbers/complex/ComplexEdgeCaseTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.numbers.complex; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; import java.math.BigDecimal; import java.util.function.BiFunction; import java.util.function.UnaryOperator; /** * Edge case tests for the functions defined by the C.99 standard for complex numbers * defined in ISO/IEC 9899, Annex G. * * <p>The test contained here are specifically written to target edge cases of finite valued * input values that cause overflow/underflow during the computation. * * <p>The test data is generated from a known implementation of the standard. * * @see <a href="http://www.open-std.org/JTC1/SC22/WG14/www/standards"> * ISO/IEC 9899 - Programming languages - C</a> */ class ComplexEdgeCaseTest { private static final double inf = Double.POSITIVE_INFINITY; private static final double nan = Double.NaN; /** * Assert the operation on the complex number is equal to the expected value. * * <p>The results are are considered equal if there are no floating-point values between them. * * @param a Real part. * @param b Imaginary part. * @param name The operation name. * @param operation The operation. * @param x Expected real part. * @param y Expected imaginary part. 
*/ private static void assertComplex(double a, double b, String name, UnaryOperator<Complex> operation, double x, double y) { assertComplex(a, b, name, operation, x, y, 1); } /** * Assert the operation on the complex number is equal to the expected value. * * <p>The results are considered equal within the provided units of least * precision. The maximum count of numbers allowed between the two values is * {@code maxUlps - 1}. * * @param a Real part. * @param b Imaginary part. * @param name The operation name. * @param operation The operation. * @param x Expected real part. * @param y Expected imaginary part. * @param maxUlps the maximum units of least precision between the two values */ private static void assertComplex(double a, double b, String name, UnaryOperator<Complex> operation, double x, double y, long maxUlps) { final Complex c = Complex.ofCartesian(a, b); final Complex e = Complex.ofCartesian(x, y); CReferenceTest.assertComplex(c, name, operation, e, maxUlps); } /** * Assert the operation on the complex numbers is equal to the expected value. * * <p>The results are considered equal if there are no floating-point values between them. * * @param a Real part of first number. * @param b Imaginary part of first number. * @param c Real part of second number. * @param d Imaginary part of second number. * @param name The operation name. * @param operation The operation. * @param x Expected real part. * @param y Expected imaginary part. */ // CHECKSTYLE: stop ParameterNumberCheck private static void assertComplex(double a, double b, double c, double d, String name, BiFunction<Complex, Complex, Complex> operation, double x, double y) { assertComplex(a, b, c, d, name, operation, x, y, 1); } /** * Assert the operation on the complex numbers is equal to the expected value. * * <p>The results are considered equal within the provided units of least * precision. The maximum count of numbers allowed between the two values is * {@code maxUlps - 1}. 
* * @param a Real part of first number. * @param b Imaginary part of first number. * @param c Real part of second number. * @param d Imaginary part of second number. * @param name The operation name * @param operation the operation * @param x Expected real part. * @param y Expected imaginary part. * @param maxUlps the maximum units of least precision between the two values */ private static void assertComplex(double a, double b, double c, double d, String name, BiFunction<Complex, Complex, Complex> operation, double x, double y, long maxUlps) { final Complex c1 = Complex.ofCartesian(a, b); final Complex c2 = Complex.ofCartesian(c, d); final Complex e = Complex.ofCartesian(x, y); CReferenceTest.assertComplex(c1, c2, name, operation, e, maxUlps); } @Test void testAcos() { // acos(z) = (pi / 2) + i ln(iz + sqrt(1 - z^2)) final String name = "acos"; final UnaryOperator<Complex> operation = Complex::acos; // Edge cases are when values are big but not infinite and small but not zero. // Big and small are set using the limits in atanh. // A medium value is used to test outside the range of the CReferenceTest. // The results have been generated using g++ -std=c++11 acos. 
        // NOTE(review): this is the tail of the preceding test method (presumably the acos
        // edge-case test — `name`/`operation` are declared above the visible region; the
        // negated imaginary parts vs testAsin below are consistent with acos). Confirm
        // against the method header.
        // xp1 * xm1 will overflow:
        final double huge = Math.sqrt(Double.MAX_VALUE) * 2;
        final double big = Math.sqrt(Double.MAX_VALUE) / 8;
        final double medium = 100;
        final double small = Math.sqrt(Double.MIN_NORMAL) * 4;
        assertComplex(huge, big, name, operation, 0.06241880999595735, -356.27960012801969);
        assertComplex(huge, medium, name, operation, 3.7291703656001039e-153, -356.27765080781188);
        assertComplex(huge, small, name, operation, 2.2250738585072019e-308, -356.27765080781188);
        assertComplex(big, big, name, operation, 0.78539816339744828, -353.85163567585209);
        assertComplex(big, medium, name, operation, 5.9666725849601662e-152, -353.50506208557209);
        assertComplex(big, small, name, operation, 3.560118173611523e-307, -353.50506208557209);
        assertComplex(medium, big, name, operation, 1.5707963267948966, -353.50506208557209);
        assertComplex(medium, medium, name, operation, 0.78541066339744181, -5.6448909570623842);
        assertComplex(medium, small, name, operation, 5.9669709409662999e-156, -5.298292365610485);
        assertComplex(small, big, name, operation, 1.5707963267948966, -353.50506208557209);
        assertComplex(small, medium, name, operation, 1.5707963267948966, -5.2983423656105888);
        assertComplex(small, small, name, operation, 1.5707963267948966, -5.9666725849601654e-154);
        // Additional cases to achieve full coverage
        // xm1 = 0
        assertComplex(1, small, name, operation, 2.4426773395109241e-77, -2.4426773395109241e-77);
        // https://svn.boost.org/trac10/ticket/7290
        assertComplex(1.00000002785941, 5.72464869028403e-200, name, operation, 2.4252018043912224e-196, -0.00023604834149293664);
    }

    // acosh is defined by acos so is not tested

    /**
     * Test {@link Complex#asin()} at edge cases where intermediate terms overflow
     * or underflow. Expected values were generated with native C++ {@code asin}.
     */
    @Test
    void testAsin() {
        // asin(z) = -i (ln(iz + sqrt(1 - z^2)))
        final String name = "asin";
        final UnaryOperator<Complex> operation = Complex::asin;
        // This method is essentially the same as acos and the edge cases are the same.
        // The results have been generated using g++ -std=c++11 asin.
        final double huge = Math.sqrt(Double.MAX_VALUE) * 2;
        final double big = Math.sqrt(Double.MAX_VALUE) / 8;
        final double medium = 100;
        final double small = Math.sqrt(Double.MIN_NORMAL) * 4;
        assertComplex(huge, big, name, operation, 1.5083775167989393, 356.27960012801969);
        assertComplex(huge, medium, name, operation, 1.5707963267948966, 356.27765080781188);
        assertComplex(huge, small, name, operation, 1.5707963267948966, 356.27765080781188);
        assertComplex(big, big, name, operation, 0.78539816339744828, 353.85163567585209);
        assertComplex(big, medium, name, operation, 1.5707963267948966, 353.50506208557209);
        assertComplex(big, small, name, operation, 1.5707963267948966, 353.50506208557209);
        assertComplex(medium, big, name, operation, 5.9666725849601662e-152, 353.50506208557209);
        assertComplex(medium, medium, name, operation, 0.78538566339745486, 5.6448909570623842);
        assertComplex(medium, small, name, operation, 1.5707963267948966, 5.298292365610485);
        assertComplex(small, big, name, operation, 3.560118173611523e-307, 353.50506208557209);
        assertComplex(small, medium, name, operation, 5.9663742737040751e-156, 5.2983423656105888);
        assertComplex(small, small, name, operation, 5.9666725849601654e-154, 5.9666725849601654e-154);
        // Additional cases to achieve full coverage
        // xm1 = 0
        assertComplex(1, small, name, operation, 1.5707963267948966, 2.4426773395109241e-77);
        // https://svn.boost.org/trac10/ticket/7290
        assertComplex(1.00000002785941, 5.72464869028403e-200, name, operation, 1.5707963267948966, 0.00023604834149293664);
    }

    // asinh is defined by asin so is not tested

    /**
     * Test {@link Complex#atanh()} at edge cases where values are big but not
     * infinite and small but not zero. Expected values were generated with
     * native C++ {@code atanh}.
     */
    @Test
    void testAtanh() {
        // atanh(z) = (1/2) ln((1 + z) / (1 - z))
        // Odd function: negative real cases defined by positive real cases
        final String name = "atanh";
        final UnaryOperator<Complex> operation = Complex::atanh;
        // Edge cases are when values are big but not infinite and small but not zero.
        // Big and small are set using the limits in atanh.
        // A medium value is used to test outside the range of the CReferenceTest.
        // It hits an edge case when x is big and y > 1.
        // The results have been generated using g++ -std=c++11 atanh.
        final double big = Math.sqrt(Double.MAX_VALUE) / 2;
        final double medium = 100;
        final double small = Math.sqrt(Double.MIN_NORMAL) * 2;
        assertComplex(big, big, name, operation, 7.4583407312002067e-155, 1.5707963267948966);
        assertComplex(big, medium, name, operation, 1.4916681462400417e-154, 1.5707963267948966);
        assertComplex(big, small, name, operation, 1.4916681462400417e-154, 1.5707963267948966);
        assertComplex(medium, big, name, operation, 2.225073858507202e-306, 1.5707963267948966);
        assertComplex(medium, medium, name, operation, 0.0049999166641667555, 1.5657962434640633);
        assertComplex(medium, small, name, operation, 0.010000333353334761, 1.5707963267948966);
        assertComplex(small, big, name, operation, 0, 1.5707963267948966);
        assertComplex(small, medium, name, operation, 2.9830379886812147e-158, 1.5607966601082315);
        assertComplex(small, small, name, operation, 2.9833362924800827e-154, 2.9833362924800827e-154);
        // Additional cases to achieve full coverage
        assertComplex(inf, big, name, operation, 0, 1.5707963267948966);
        assertComplex(big, inf, name, operation, 0, 1.5707963267948966);
    }

    /**
     * Test {@link Complex#cosh()} for extreme values, including overflow-safe
     * handling of {@code e^|x| / 2} when {@code e^|x|} overflows. Expected values
     * were generated with native C++ ({@code g++}).
     */
    @Test
    void testCosh() {
        // cosh(a + b i) = cosh(a)cos(b) + i sinh(a)sin(b)
        // Even function: negative real cases defined by positive real cases
        final String name = "cosh";
        final UnaryOperator<Complex> operation = Complex::cosh;
        // Implementation defers to java.util.Math.
        // Hit edge cases for extreme values.
        final double big = Double.MAX_VALUE;
        final double medium = 2;
        final double small = Double.MIN_NORMAL;
        assertComplex(big, big, name, operation, -inf, inf);
        assertComplex(big, medium, name, operation, -inf, inf);
        assertComplex(big, small, name, operation, inf, inf);
        assertComplex(medium, big, name, operation, -3.7621493762972804, 0.017996317370418576);
        assertComplex(medium, medium, name, operation, -1.5656258353157435, 3.297894836311237);
        assertComplex(medium, small, name, operation, 3.7621956910836314, 8.0700322819551687e-308);
        assertComplex(small, big, name, operation, -0.99998768942655991, 1.1040715888508271e-310);
        assertComplex(small, medium, name, operation, -0.41614683654714241, 2.0232539340376892e-308);
        assertComplex(small, small, name, operation, 1, 0);
        // Overflow test.
        // Based on MATH-901 discussion of FastMath functionality.
        // https://issues.apache.org/jira/browse/MATH-901#comment-13500669
        // sinh(x)/cosh(x) can be approximated by exp(x) but must be overflow safe.
        // sinh(x) = sign(x) * e^|x| / 2 when x is large.
        // cosh(x) = e^|x| / 2 when x is large.
        // Thus e^|x| can overflow but e^|x| / 2 may not.
        // (e^|x| / 2) * sin/cos will always be smaller.
        final double tiny = Double.MIN_VALUE;
        final double x = 709.783;
        // Guard assertion: x is just above the overflow threshold of Math.exp.
        Assertions.assertEquals(inf, Math.exp(x));
        // As computed by GNU g++
        assertComplex(x, 0, name, operation, 8.9910466927705402e+307, 0.0);
        assertComplex(-x, 0, name, operation, 8.9910466927705402e+307, -0.0);
        // sub-normal number x:
        // cos(x) = 1 => real = (e^|x| / 2)
        // sin(x) = x => imaginary = x * (e^|x| / 2)
        assertComplex(x, small, name, operation, 8.9910466927705402e+307, 2.0005742956701358);
        assertComplex(-x, small, name, operation, 8.9910466927705402e+307, -2.0005742956701358);
        assertComplex(x, tiny, name, operation, 8.9910466927705402e+307, 4.4421672910524807e-16);
        assertComplex(-x, tiny, name, operation, 8.9910466927705402e+307, -4.4421672910524807e-16);
        // Should not overflow imaginary.
        assertComplex(2 * x, tiny, name, operation, inf, 7.9879467061901743e+292);
        assertComplex(-2 * x, tiny, name, operation, inf, -7.9879467061901743e+292);
        // Test when large enough to overflow any non-zero value to infinity. Result should be
        // as if x was infinite and y was finite.
        assertComplex(3 * x, tiny, name, operation, inf, inf);
        assertComplex(-3 * x, tiny, name, operation, inf, -inf);
        // pi / 2 x:
        // cos(x) = ~0 => real = x * (e^|x| / 2)
        // sin(x) = ~1 => imaginary = (e^|x| / 2)
        final double pi2 = Math.PI / 2;
        assertComplex(x, pi2, name, operation, 5.5054282766429199e+291, 8.9910466927705402e+307);
        assertComplex(-x, pi2, name, operation, 5.5054282766429199e+291, -8.9910466927705402e+307);
        assertComplex(2 * x, pi2, name, operation, inf, inf);
        assertComplex(-2 * x, pi2, name, operation, inf, -inf);
        // Test when large enough to overflow any non-zero value to infinity. Result should be
        // as if x was infinite and y was finite.
        assertComplex(3 * x, pi2, name, operation, inf, inf);
        assertComplex(-3 * x, pi2, name, operation, inf, -inf);
    }

    /**
     * Test {@link Complex#sinh()} for extreme values. Mirrors {@link #testCosh()}
     * with sign changes on the real and imaginary parts (sinh is odd).
     */
    @Test
    void testSinh() {
        // sinh(a + b i) = sinh(a)cos(b) + i cosh(a)sin(b)
        // Odd function: negative real cases defined by positive real cases
        final String name = "sinh";
        final UnaryOperator<Complex> operation = Complex::sinh;
        // Implementation defers to java.util.Math.
        // Hit edge cases for extreme values.
        final double big = Double.MAX_VALUE;
        final double medium = 2;
        final double small = Double.MIN_NORMAL;
        assertComplex(big, big, name, operation, -inf, inf);
        assertComplex(big, medium, name, operation, -inf, inf);
        assertComplex(big, small, name, operation, inf, inf);
        assertComplex(medium, big, name, operation, -3.6268157591156114, 0.018667844927220067);
        assertComplex(medium, medium, name, operation, -1.5093064853236158, 3.4209548611170133);
        assertComplex(medium, small, name, operation, 3.626860407847019, 8.3711632828186228e-308);
        assertComplex(small, big, name, operation, -2.2250464665720564e-308, 0.004961954789184062);
        assertComplex(small, medium, name, operation, -9.2595744730151568e-309, 0.90929742682568171);
        assertComplex(small, small, name, operation, 2.2250738585072014e-308, 2.2250738585072014e-308);
        // Overflow test.
        // As per cosh with sign changes to real and imaginary
        // sinh(x) = sign(x) * e^|x| / 2 when x is large.
        // cosh(x) = e^|x| / 2 when x is large.
        // Thus e^|x| can overflow but e^|x| / 2 may not.
        // sinh(x) * sin/cos will always be smaller.
        final double tiny = Double.MIN_VALUE;
        final double x = 709.783;
        // Guard assertion: x is just above the overflow threshold of Math.exp.
        Assertions.assertEquals(inf, Math.exp(x));
        // As computed by GNU g++
        assertComplex(x, 0, name, operation, 8.9910466927705402e+307, 0.0);
        assertComplex(-x, 0, name, operation, -8.9910466927705402e+307, 0.0);
        // sub-normal number:
        // cos(x) = 1 => real = (e^|x| / 2)
        // sin(x) = x => imaginary = x * (e^|x| / 2)
        assertComplex(x, small, name, operation, 8.9910466927705402e+307, 2.0005742956701358);
        assertComplex(-x, small, name, operation, -8.9910466927705402e+307, 2.0005742956701358);
        assertComplex(x, tiny, name, operation, 8.9910466927705402e+307, 4.4421672910524807e-16);
        assertComplex(-x, tiny, name, operation, -8.9910466927705402e+307, 4.4421672910524807e-16);
        // Should not overflow imaginary.
        assertComplex(2 * x, tiny, name, operation, inf, 7.9879467061901743e+292);
        assertComplex(-2 * x, tiny, name, operation, -inf, 7.9879467061901743e+292);
        // Test when large enough to overflow any non-zero value to infinity. Result should be
        // as if x was infinite and y was finite.
        assertComplex(3 * x, tiny, name, operation, inf, inf);
        assertComplex(-3 * x, tiny, name, operation, -inf, inf);
        // pi / 2 x:
        // cos(x) = ~0 => real = x * (e^|x| / 2)
        // sin(x) = ~1 => imaginary = (e^|x| / 2)
        final double pi2 = Math.PI / 2;
        assertComplex(x, pi2, name, operation, 5.5054282766429199e+291, 8.9910466927705402e+307);
        assertComplex(-x, pi2, name, operation, -5.5054282766429199e+291, 8.9910466927705402e+307);
        assertComplex(2 * x, pi2, name, operation, inf, inf);
        assertComplex(-2 * x, pi2, name, operation, -inf, inf);
        // Test when large enough to overflow any non-zero value to infinity. Result should be
        // as if x was infinite and y was finite.
        assertComplex(3 * x, pi2, name, operation, inf, inf);
        assertComplex(-3 * x, pi2, name, operation, -inf, inf);
    }

    /**
     * Test {@link Complex#tanh()} at overflow and underflow edge cases of the
     * {@code 2a} and {@code 2b} intermediate terms.
     */
    @Test
    void testTanh() {
        // tan(a + b i) = sinh(2a)/(cosh(2a)+cos(2b)) + i [sin(2b)/(cosh(2a)+cos(2b))]
        // Odd function: negative real cases defined by positive real cases
        final String name = "tanh";
        final UnaryOperator<Complex> operation = Complex::tanh;
        // Overflow on 2b:
        // cos(2b) = cos(inf) = NaN
        // sin(2b) = sin(inf) = NaN
        assertComplex(1, Double.MAX_VALUE, name, operation, 0.76160203106265523, -0.0020838895895863505);
        // Underflow on 2b:
        // cos(2b) -> 1
        // sin(2b) -> 0
        assertComplex(1, Double.MIN_NORMAL, name, operation, 0.76159415595576485, 9.344739287691424e-309);
        assertComplex(1, Double.MIN_VALUE, name, operation, 0.76159415595576485, 0);
        // Overflow on 2a:
        // sinh(2a) = sinh(inf) = inf
        // cosh(2a) = cosh(inf) = inf
        // Test all sign variants as this execution path to treat real as infinite
        // is not tested elsewhere.
        assertComplex(Double.MAX_VALUE, 1, name, operation, 1, 0.0);
        assertComplex(Double.MAX_VALUE, -1, name, operation, 1, -0.0);
        assertComplex(-Double.MAX_VALUE, 1, name, operation, -1, 0.0);
        assertComplex(-Double.MAX_VALUE, -1, name, operation, -1, -0.0);
        // Underflow on 2a:
        // sinh(2a) -> 0
        // cosh(2a) -> 0
        assertComplex(Double.MIN_NORMAL, 1, name, operation, 7.6220323800193346e-308, 1.5574077246549021);
        assertComplex(Double.MIN_VALUE, 1, name, operation, 1.4821969375237396e-323, 1.5574077246549021);
        // Underflow test.
        // sinh(x) can be approximated by exp(x) but must be overflow safe.
        // im = 2 sin(2y) / e^2|x|
        // This can be computed when e^2|x| only just overflows.
        // Set a case where e^2|x| overflows but the imaginary can be computed
        double x = 709.783 / 2;
        double y = Math.PI / 4;
        Assertions.assertEquals(1.0, Math.sin(2 * y), 1e-16);
        Assertions.assertEquals(Double.POSITIVE_INFINITY, Math.exp(2 * x));
        // As computed by GNU g++
        assertComplex(x, y, name, operation, 1, 1.1122175583895849e-308);
    }

    /**
     * Test {@link Complex#exp()} when {@code exp(a)} overflows or underflows;
     * sign of the result follows the quadrant of {@code b}.
     */
    @Test
    void testExp() {
        final String name = "exp";
        final UnaryOperator<Complex> operation = Complex::exp;
        // exp(a + b i) = exp(a) (cos(b) + i sin(b))
        // Overflow if exp(a) == inf
        assertComplex(1000, 0, name, operation, inf, 0.0);
        assertComplex(1000, 1, name, operation, inf, inf);
        assertComplex(1000, 2, name, operation, -inf, inf);
        assertComplex(1000, 3, name, operation, -inf, inf);
        assertComplex(1000, 4, name, operation, -inf, -inf);
        // Underflow if exp(a) == 0
        assertComplex(-1000, 0, name, operation, 0.0, 0.0);
        assertComplex(-1000, 1, name, operation, 0.0, 0.0);
        assertComplex(-1000, 2, name, operation, -0.0, 0.0);
        assertComplex(-1000, 3, name, operation, -0.0, 0.0);
        assertComplex(-1000, 4, name, operation, -0.0, -0.0);
    }

    /**
     * Test {@link Complex#log()} at overflow/underflow of the modulus and at the
     * precision-critical region where the modulus is close to 1 (cancellation in
     * {@code log(|z|)}; see {@link #assertLog(double, double, long)}).
     */
    @Test
    void testLog() {
        final String name = "log";
        final UnaryOperator<Complex> operation = Complex::log;
        // ln(a + b i) = ln(|a + b i|) + i arg(a + b i)
        // |a + b i| = sqrt(a^2 + b^2)
        // arg(a + b i) = Math.atan2(imaginary, real)
        // Overflow if sqrt(a^2 + b^2) == inf.
        // Matlab computes this.
        assertComplex(-Double.MAX_VALUE, Double.MAX_VALUE, name, operation, 7.101292864836639e2, Math.PI * 3 / 4);
        assertComplex(Double.MAX_VALUE, Double.MAX_VALUE, name, operation, 7.101292864836639e2, Math.PI / 4);
        assertComplex(-Double.MAX_VALUE, Double.MAX_VALUE / 4, name, operation, 7.098130252042921e2, 2.896613990462929);
        assertComplex(Double.MAX_VALUE, Double.MAX_VALUE / 4, name, operation, 7.098130252042921e2, 2.449786631268641e-1, 2);
        // Underflow if sqrt(a^2 + b^2) -> 0
        assertComplex(-Double.MIN_NORMAL, Double.MIN_NORMAL, name, operation, -708.04984494198413, 2.3561944901923448);
        assertComplex(Double.MIN_NORMAL, Double.MIN_NORMAL, name, operation, -708.04984494198413, 0.78539816339744828);
        // Math.hypot(min, min) = min.
        // To compute the expected result do scaling of the actual hypot = sqrt(2).
        // log(a/n) = log(a) - log(n)
        // n = 2^1074 => log(a) - log(2) * 1074
        double expected = Math.log(Math.sqrt(2)) - Math.log(2) * 1074;
        assertComplex(-Double.MIN_VALUE, Double.MIN_VALUE, name, operation, expected, Math.atan2(1, -1));
        expected = Math.log(Math.sqrt(5)) - Math.log(2) * 1074;
        assertComplex(-Double.MIN_VALUE, 2 * Double.MIN_VALUE, name, operation, expected, Math.atan2(2, -1));
        // Imprecision if sqrt(a^2 + b^2) == 1 as log(1) is 0.
        // Method should switch to using log1p(x^2 + x^2 - 1) * 0.5.
        // In the following:
        // max = max(real, imaginary)
        // min = min(real, imaginary)
        // No cancellation error when max > 1
        assertLog(1.0001, Math.sqrt(1.2 - 1.0001 * 1.0001), 1);
        assertLog(1.0001, Math.sqrt(1.1 - 1.0001 * 1.0001), 1);
        assertLog(1.0001, Math.sqrt(1.02 - 1.0001 * 1.0001), 0);
        assertLog(1.0001, Math.sqrt(1.01 - 1.0001 * 1.0001), 0);
        // Cancellation error when max < 1.
        // Hard: 4 * min^2 < |max^2 - 1|
        // Gets harder as max is further from 1
        assertLog(0.99, 0.00001, 0);
        assertLog(0.95, 0.00001, 0);
        assertLog(0.9, 0.00001, 0);
        assertLog(0.85, 0.00001, 0);
        assertLog(0.8, 0.00001, 0);
        assertLog(0.75, 0.00001, 0);
        // At this point the log function does not use high precision computation
        assertLog(0.7, 0.00001, 2);
        // Very hard: 4 * min^2 > |max^2 - 1|
        // Radius 0.99
        assertLog(0.97, Math.sqrt(0.99 - 0.97 * 0.97), 0);
        // Radius 1.01
        assertLog(0.97, Math.sqrt(1.01 - 0.97 * 0.97), 0);
        // Massive relative error
        // Radius 0.9999
        assertLog(0.97, Math.sqrt(0.9999 - 0.97 * 0.97), 0);
        // polar numbers on a 1/8 circle with a magnitude close to 1.
        final int steps = 20;
        final double[] magnitude = {0.999, 1.0, 1.001};
        final int[] ulps = {0, 0, 1};
        for (int j = 0; j < magnitude.length; j++) {
            for (int i = 1; i <= steps; i++) {
                final double theta = i * Math.PI / (4 * steps);
                assertLog(magnitude[j] * Math.sin(theta), magnitude[j] * Math.cos(theta), ulps[j]);
            }
        }
        // cis numbers using an increasingly smaller angle
        double theta = Math.PI / (4 * steps);
        while (theta > 0) {
            theta /= 2;
            assertLog(Math.sin(theta), Math.cos(theta), 0);
        }
        // Extreme cases.
        final double up1 = Math.nextUp(1.0);
        final double down1 = Math.nextDown(1.0);
        assertLog(down1, Double.MIN_NORMAL, 0);
        assertLog(down1, Double.MIN_VALUE, 0);
        // No use of high-precision computation
        assertLog(up1, Double.MIN_NORMAL, 2);
        assertLog(up1, Double.MIN_VALUE, 2);
        // Add some cases known to fail without very high precision computation.
        // These have been found using randomly generated cis numbers and the
        // previous Dekker split-summation algorithm:
        // theta = rng.nextDouble()
        // x = Math.sin(theta)
        // y = Math.cos(theta)
        // Easy: <16 ulps with the Dekker summation
        assertLog(0.007640392270319105, 0.9999708117770016, 0);
        assertLog(0.40158433204881533, 0.9158220483548684, 0);
        assertLog(0.13258789214774552, 0.9911712520325727, 0);
        assertLog(0.2552206803398717, 0.9668828286441191, 0);
        // Hard: >1024 ulps with the Dekker summation
        assertLog(0.4650816500945186, 0.8852677892848919, 0);
        assertLog(0.06548693057069123, 0.9978534270745526, 0);
        assertLog(0.08223027214657339, 0.9966133564942327, 0);
        assertLog(0.06548693057069123, 0.9978534270745526, 0);
        assertLog(0.04590800199633988, 0.9989456718724518, 0);
        assertLog(0.3019636508581243, 0.9533194394118022, 0);
    }

    /**
     * Assert the Complex log function using BigDecimal to compute the field norm
     * {@code x*x + y*y} and then {@link Math#log1p(double)} to compute the log of
     * the modulus using {@code 0.5 * log1p(x*x + y*y - 1)}. This test is for the
     * extreme case for precision around {@code sqrt(x*x + y*y) = 1} where using
     * {@link Math#log(double)} will fail dramatically.
     *
     * @param x the real value of the complex
     * @param y the imaginary value of the complex
     * @param maxUlps the maximum units of least precision between the two values
     */
    private static void assertLog(double x, double y, long maxUlps) {
        // Compute the best value we can
        final BigDecimal bx = new BigDecimal(x);
        final BigDecimal by = new BigDecimal(y);
        final BigDecimal exact = bx.multiply(bx).add(by.multiply(by)).subtract(BigDecimal.ONE);
        final double real = 0.5 * Math.log1p(exact.doubleValue());
        final double imag = Math.atan2(y, x);
        assertComplex(x, y, "log", Complex::log, real, imag, maxUlps);
    }

    /**
     * Test {@link Complex#sqrt()} against the polar-coordinate definition for
     * real-only and imaginary-only inputs, and for overflow/underflow of the
     * absolute value.
     */
    @Test
    void testSqrt() {
        final String name = "sqrt";
        final UnaryOperator<Complex> operation = Complex::sqrt;
        // Test real/imaginary only numbers satisfy the definition using polar coordinates:
        // real = sqrt(abs()) * Math.cos(arg() / 2)
        // imag = sqrt(abs()) * Math.sin(arg() / 2)
        // However direct use of sin/cos will result in incorrect results due to floating-point error.
        // This test asserts on the closest result to the exact answer which is possible
        // if not using a simple polar computation.
        // Note: If this test fails in the set-up assertions it is due to a change in the
        // precision of java.util.Math.
        // For positive real-only the argument is +/-0.
        // For negative real-only the argument is +/-pi.
        Assertions.assertEquals(0, Math.atan2(0, 1));
        Assertions.assertEquals(Math.PI, Math.atan2(0, -1));
        // In both cases the trigonometric functions should be exact but
        // cos(pi/2) cannot be as pi/2 is not exact.
        final double cosArgRe = 1.0;
        final double sinArgRe = 0.0;
        Assertions.assertNotEquals(0.0, Math.cos(Math.PI / 2), "Expected cos(pi/2) to be non-zero");
        Assertions.assertEquals(0.0, Math.cos(Math.PI / 2), 6.123233995736766e-17);
        // For imaginary-only the argument is Math.atan2(y, 0) = +/- pi/2.
        Assertions.assertEquals(Math.PI / 2, Math.atan2(1, 0));
        Assertions.assertEquals(-Math.PI / 2, Math.atan2(-1, 0));
        // There is 1 ULP difference in the result of cos/sin of pi/4.
        // It should be sqrt(2) / 2 for both.
        final double cosArgIm = Math.cos(Math.PI / 4);
        final double sinArgIm = Math.sin(Math.PI / 4);
        final double root2over2 = Math.sqrt(2) / 2;
        final double ulp = Math.ulp(cosArgIm);
        Assertions.assertNotEquals(cosArgIm, sinArgIm, "Expected cos(pi/4) to not exactly equal sin(pi/4)");
        Assertions.assertEquals(root2over2, cosArgIm, 0, "Expected cos(pi/4) to be sqrt(2) / 2");
        Assertions.assertEquals(root2over2, sinArgIm, ulp, "Expected sin(pi/4) to be 1 ulp from sqrt(2) / 2");
        for (final double a : new double[] {0.5, 1.0, 1.2322, 345345.234523}) {
            final double rootA = Math.sqrt(a);
            assertComplex(a, 0, name, operation, rootA * cosArgRe, rootA * sinArgRe, 0);
            // This should be exact. It will fail if using the polar computation
            // real = sqrt(abs()) * Math.cos(arg() / 2) as cos(pi/2) is not 0.0 but 6.123233995736766e-17
            assertComplex(-a, 0, name, operation, rootA * sinArgRe, rootA * cosArgRe, 0);
            // This should be exact. It won't be if Complex is using polar computation
            // with sin/cos which does not output the same result for angle pi/4.
            assertComplex(0, a, name, operation, rootA * root2over2, rootA * root2over2, 0);
            assertComplex(0, -a, name, operation, rootA * root2over2, -rootA * root2over2, 0);
        }
        // Check overflow safe.
        double a = Double.MAX_VALUE;
        final double b = a / 4;
        Assertions.assertEquals(inf, Complex.ofCartesian(a, b).abs(), "Expected overflow");
        // The expected absolute value has been computed using BigDecimal on Java 9
        //final double newAbs = new BigDecimal(a).multiply(new BigDecimal(a)).add(
        //    new BigDecimal(b).multiply(new BigDecimal(b)))
        //    .sqrt(MathContext.DECIMAL128).sqrt(MathContext.DECIMAL128).doubleValue()
        final double newAbs = 1.3612566508088272E154;
        assertComplex(a, b, name, operation, newAbs * Math.cos(0.5 * Math.atan2(b, a)),
            newAbs * Math.sin(0.5 * Math.atan2(b, a)), 3);
        assertComplex(b, a, name, operation, newAbs * Math.cos(0.5 * Math.atan2(a, b)),
            newAbs * Math.sin(0.5 * Math.atan2(a, b)), 2);
        // Note that the computation is possible in polar coords if abs() does not overflow.
        a = Double.MAX_VALUE / 2;
        assertComplex(-a, a, name, operation, 4.3145940638864765e+153, 1.0416351505169177e+154, 2);
        assertComplex(a, a, name, operation, 1.0416351505169177e+154, 4.3145940638864758e+153);
        assertComplex(-a, -a, name, operation, 4.3145940638864765e+153, -1.0416351505169177e+154, 2);
        assertComplex(a, -a, name, operation, 1.0416351505169177e+154, -4.3145940638864758e+153);
        // Check minimum normal value conditions
        // Computing in polar coords produces a very different result with
        // MIN_VALUE so use MIN_NORMAL
        a = Double.MIN_NORMAL;
        assertComplex(-a, a, name, operation, 6.7884304867749663e-155, 1.6388720948399111e-154);
        assertComplex(a, a, name, operation, 1.6388720948399111e-154, 6.7884304867749655e-155);
        assertComplex(-a, -a, name, operation, 6.7884304867749663e-155, -1.6388720948399111e-154);
        assertComplex(a, -a, name, operation, 1.6388720948399111e-154, -6.7884304867749655e-155);
    }

    // Note: inf/nan edge cases for
    // multiply/divide are tested in CStandardTest

    /**
     * Test {@link Complex#divide(Complex)} is safe when the divisor's squared
     * modulus {@code c*c + d*d} overflows or underflows, including all
     * sub-normal numbers.
     */
    @Test
    void testDivide() {
        final String name = "divide";
        final BiFunction<Complex, Complex, Complex> operation = Complex::divide;
        // Should be able to divide by a complex whose absolute (c*c+d*d)
        // overflows or underflows including all sub-normal numbers.
        // Worst case is using Double.MIN_VALUE
        // Should normalise c and d to range [1, 2) resulting in:
        // c = d = 1
        // c * c + d * d = 2
        // scaled x = (a * c + b * d) / denom = Double.MIN_VALUE
        // scaled y = (b * c - a * d) / denom = 0
        // The values are rescaled by 1023 + 51 (shift the last bit of the 52 bit mantissa)
        double x = Math.scalb(Double.MIN_VALUE, 1023 + 51);
        Assertions.assertEquals(1.0, x);
        // In other words the result is (x+iy) / (x+iy) = (1+i0)
        // The result is the same if imaginary is zero (i.e. a real only divide)
        assertComplex(Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE, Double.MAX_VALUE, name, operation, 1.0, 0.0);
        assertComplex(Double.MAX_VALUE, 0.0, Double.MAX_VALUE, 0.0, name, operation, 1.0, 0.0);
        assertComplex(1.0, 1.0, 1.0, 1.0, name, operation, 1.0, 0.0);
        assertComplex(1.0, 0.0, 1.0, 0.0, name, operation, 1.0, 0.0);
        // Should work for all small values
        x = Double.MIN_NORMAL;
        while (x != 0) {
            assertComplex(x, x, x, x, name, operation, 1.0, 0.0);
            assertComplex(x, 0, x, 0, name, operation, 1.0, 0.0);
            x /= 2;
        }
        // Some cases of not self-divide
        assertComplex(1, 1, Double.MIN_VALUE, Double.MIN_VALUE, name, operation, inf, 0);
        // As computed by GNU g++
        assertComplex(Double.MIN_NORMAL, Double.MIN_NORMAL, Double.MIN_VALUE, Double.MIN_VALUE, name, operation, 4503599627370496L, 0);
        assertComplex(Double.MIN_VALUE, Double.MIN_VALUE, Double.MIN_NORMAL, Double.MIN_NORMAL, name, operation, 2.2204460492503131e-16, 0);
    }

    /**
     * Test {@link Complex#pow(Complex)} NaN propagation and overflow handling.
     * pow is composed of log/multiply/exp which are individually overflow safe.
     */
    @Test
    void testPow() {
        final String name = "pow";
        final BiFunction<Complex, Complex, Complex> operation = Complex::pow;
        // pow(Complex) is log().multiply(Complex).exp()
        // All are overflow safe and handle infinities as defined in the C99 standard.
        // Test NaN
        assertComplex(1, 1, nan, nan, name, operation, nan, nan);
        assertComplex(nan, nan, 1, 1, name, operation, nan, nan);
        assertComplex(nan, 1, 1, 1, name, operation, nan, nan);
        assertComplex(1, nan, 1, 1, name, operation, nan, nan);
        assertComplex(1, 1, nan, 1, name, operation, nan, nan);
        assertComplex(1, 1, 1, nan, name, operation, nan, nan);
        // Test overflow.
        assertComplex(Double.MAX_VALUE, 1, 2, 2, name, operation, inf, -inf);
        assertComplex(1, Double.MAX_VALUE, 2, 2, name, operation, -inf, inf);
    }
}
googleads/google-ads-java
36,156
google-ads-stubs-v19/src/main/java/com/google/ads/googleads/v19/errors/PolicyFindingDetails.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v19/errors/errors.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v19.errors; /** * <pre> * Error returned as part of a mutate response. * This error indicates one or more policy findings in the fields of a * resource. * </pre> * * Protobuf type {@code google.ads.googleads.v19.errors.PolicyFindingDetails} */ public final class PolicyFindingDetails extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v19.errors.PolicyFindingDetails) PolicyFindingDetailsOrBuilder { private static final long serialVersionUID = 0L; // Use PolicyFindingDetails.newBuilder() to construct. private PolicyFindingDetails(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PolicyFindingDetails() { policyTopicEntries_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new PolicyFindingDetails(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.errors.ErrorsProto.internal_static_google_ads_googleads_v19_errors_PolicyFindingDetails_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.errors.ErrorsProto.internal_static_google_ads_googleads_v19_errors_PolicyFindingDetails_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.errors.PolicyFindingDetails.class, com.google.ads.googleads.v19.errors.PolicyFindingDetails.Builder.class); } public static final int POLICY_TOPIC_ENTRIES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.ads.googleads.v19.common.PolicyTopicEntry> policyTopicEntries_; /** * <pre> * The list of policy 
topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public java.util.List<com.google.ads.googleads.v19.common.PolicyTopicEntry> getPolicyTopicEntriesList() { return policyTopicEntries_; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.ads.googleads.v19.common.PolicyTopicEntryOrBuilder> getPolicyTopicEntriesOrBuilderList() { return policyTopicEntries_; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public int getPolicyTopicEntriesCount() { return policyTopicEntries_.size(); } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.common.PolicyTopicEntry getPolicyTopicEntries(int index) { return policyTopicEntries_.get(index); } /** * <pre> * The list of policy topics for the resource. 
Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v19.common.PolicyTopicEntryOrBuilder getPolicyTopicEntriesOrBuilder( int index) { return policyTopicEntries_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < policyTopicEntries_.size(); i++) { output.writeMessage(1, policyTopicEntries_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < policyTopicEntries_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, policyTopicEntries_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v19.errors.PolicyFindingDetails)) { return super.equals(obj); } com.google.ads.googleads.v19.errors.PolicyFindingDetails other = (com.google.ads.googleads.v19.errors.PolicyFindingDetails) obj; if (!getPolicyTopicEntriesList() .equals(other.getPolicyTopicEntriesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (getPolicyTopicEntriesCount() > 0) { hash = (37 * hash) + POLICY_TOPIC_ENTRIES_FIELD_NUMBER; hash = (53 * hash) + getPolicyTopicEntriesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v19.errors.PolicyFindingDetails parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.errors.PolicyFindingDetails parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.errors.PolicyFindingDetails parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.errors.PolicyFindingDetails parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.errors.PolicyFindingDetails parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v19.errors.PolicyFindingDetails parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v19.errors.PolicyFindingDetails parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
com.google.ads.googleads.v19.errors.PolicyFindingDetails parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.errors.PolicyFindingDetails parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.errors.PolicyFindingDetails parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v19.errors.PolicyFindingDetails parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v19.errors.PolicyFindingDetails parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v19.errors.PolicyFindingDetails prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Error returned as part of a mutate response. * This error indicates one or more policy findings in the fields of a * resource. * </pre> * * Protobuf type {@code google.ads.googleads.v19.errors.PolicyFindingDetails} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v19.errors.PolicyFindingDetails) com.google.ads.googleads.v19.errors.PolicyFindingDetailsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v19.errors.ErrorsProto.internal_static_google_ads_googleads_v19_errors_PolicyFindingDetails_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v19.errors.ErrorsProto.internal_static_google_ads_googleads_v19_errors_PolicyFindingDetails_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v19.errors.PolicyFindingDetails.class, com.google.ads.googleads.v19.errors.PolicyFindingDetails.Builder.class); } // Construct using com.google.ads.googleads.v19.errors.PolicyFindingDetails.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (policyTopicEntriesBuilder_ == null) { policyTopicEntries_ = java.util.Collections.emptyList(); } else { policyTopicEntries_ = null; policyTopicEntriesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() 
{ return com.google.ads.googleads.v19.errors.ErrorsProto.internal_static_google_ads_googleads_v19_errors_PolicyFindingDetails_descriptor; } @java.lang.Override public com.google.ads.googleads.v19.errors.PolicyFindingDetails getDefaultInstanceForType() { return com.google.ads.googleads.v19.errors.PolicyFindingDetails.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v19.errors.PolicyFindingDetails build() { com.google.ads.googleads.v19.errors.PolicyFindingDetails result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v19.errors.PolicyFindingDetails buildPartial() { com.google.ads.googleads.v19.errors.PolicyFindingDetails result = new com.google.ads.googleads.v19.errors.PolicyFindingDetails(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.ads.googleads.v19.errors.PolicyFindingDetails result) { if (policyTopicEntriesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { policyTopicEntries_ = java.util.Collections.unmodifiableList(policyTopicEntries_); bitField0_ = (bitField0_ & ~0x00000001); } result.policyTopicEntries_ = policyTopicEntries_; } else { result.policyTopicEntries_ = policyTopicEntriesBuilder_.build(); } } private void buildPartial0(com.google.ads.googleads.v19.errors.PolicyFindingDetails result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( 
com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v19.errors.PolicyFindingDetails) { return mergeFrom((com.google.ads.googleads.v19.errors.PolicyFindingDetails)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v19.errors.PolicyFindingDetails other) { if (other == com.google.ads.googleads.v19.errors.PolicyFindingDetails.getDefaultInstance()) return this; if (policyTopicEntriesBuilder_ == null) { if (!other.policyTopicEntries_.isEmpty()) { if (policyTopicEntries_.isEmpty()) { policyTopicEntries_ = other.policyTopicEntries_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.addAll(other.policyTopicEntries_); } onChanged(); } } else { if (!other.policyTopicEntries_.isEmpty()) { if (policyTopicEntriesBuilder_.isEmpty()) { policyTopicEntriesBuilder_.dispose(); policyTopicEntriesBuilder_ = null; policyTopicEntries_ = other.policyTopicEntries_; bitField0_ = (bitField0_ & ~0x00000001); policyTopicEntriesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getPolicyTopicEntriesFieldBuilder() : null; } else { policyTopicEntriesBuilder_.addAllMessages(other.policyTopicEntries_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.ads.googleads.v19.common.PolicyTopicEntry m = input.readMessage( com.google.ads.googleads.v19.common.PolicyTopicEntry.parser(), extensionRegistry); if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(m); } else { policyTopicEntriesBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.ads.googleads.v19.common.PolicyTopicEntry> policyTopicEntries_ = java.util.Collections.emptyList(); private void ensurePolicyTopicEntriesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { policyTopicEntries_ = new java.util.ArrayList<com.google.ads.googleads.v19.common.PolicyTopicEntry>(policyTopicEntries_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v19.common.PolicyTopicEntry, com.google.ads.googleads.v19.common.PolicyTopicEntry.Builder, com.google.ads.googleads.v19.common.PolicyTopicEntryOrBuilder> policyTopicEntriesBuilder_; /** * <pre> * 
The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public java.util.List<com.google.ads.googleads.v19.common.PolicyTopicEntry> getPolicyTopicEntriesList() { if (policyTopicEntriesBuilder_ == null) { return java.util.Collections.unmodifiableList(policyTopicEntries_); } else { return policyTopicEntriesBuilder_.getMessageList(); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public int getPolicyTopicEntriesCount() { if (policyTopicEntriesBuilder_ == null) { return policyTopicEntries_.size(); } else { return policyTopicEntriesBuilder_.getCount(); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v19.common.PolicyTopicEntry getPolicyTopicEntries(int index) { if (policyTopicEntriesBuilder_ == null) { return policyTopicEntries_.get(index); } else { return policyTopicEntriesBuilder_.getMessage(index); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). 
* </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder setPolicyTopicEntries( int index, com.google.ads.googleads.v19.common.PolicyTopicEntry value) { if (policyTopicEntriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.set(index, value); onChanged(); } else { policyTopicEntriesBuilder_.setMessage(index, value); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder setPolicyTopicEntries( int index, com.google.ads.googleads.v19.common.PolicyTopicEntry.Builder builderForValue) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.set(index, builderForValue.build()); onChanged(); } else { policyTopicEntriesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addPolicyTopicEntries(com.google.ads.googleads.v19.common.PolicyTopicEntry value) { if (policyTopicEntriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(value); onChanged(); } else { policyTopicEntriesBuilder_.addMessage(value); } return this; } /** * <pre> * The list of policy topics for the resource. 
Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addPolicyTopicEntries( int index, com.google.ads.googleads.v19.common.PolicyTopicEntry value) { if (policyTopicEntriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(index, value); onChanged(); } else { policyTopicEntriesBuilder_.addMessage(index, value); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addPolicyTopicEntries( com.google.ads.googleads.v19.common.PolicyTopicEntry.Builder builderForValue) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(builderForValue.build()); onChanged(); } else { policyTopicEntriesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). 
* </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addPolicyTopicEntries( int index, com.google.ads.googleads.v19.common.PolicyTopicEntry.Builder builderForValue) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(index, builderForValue.build()); onChanged(); } else { policyTopicEntriesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addAllPolicyTopicEntries( java.lang.Iterable<? extends com.google.ads.googleads.v19.common.PolicyTopicEntry> values) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, policyTopicEntries_); onChanged(); } else { policyTopicEntriesBuilder_.addAllMessages(values); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder clearPolicyTopicEntries() { if (policyTopicEntriesBuilder_ == null) { policyTopicEntries_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { policyTopicEntriesBuilder_.clear(); } return this; } /** * <pre> * The list of policy topics for the resource. 
Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder removePolicyTopicEntries(int index) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.remove(index); onChanged(); } else { policyTopicEntriesBuilder_.remove(index); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v19.common.PolicyTopicEntry.Builder getPolicyTopicEntriesBuilder( int index) { return getPolicyTopicEntriesFieldBuilder().getBuilder(index); } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v19.common.PolicyTopicEntryOrBuilder getPolicyTopicEntriesOrBuilder( int index) { if (policyTopicEntriesBuilder_ == null) { return policyTopicEntries_.get(index); } else { return policyTopicEntriesBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). 
* </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public java.util.List<? extends com.google.ads.googleads.v19.common.PolicyTopicEntryOrBuilder> getPolicyTopicEntriesOrBuilderList() { if (policyTopicEntriesBuilder_ != null) { return policyTopicEntriesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(policyTopicEntries_); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v19.common.PolicyTopicEntry.Builder addPolicyTopicEntriesBuilder() { return getPolicyTopicEntriesFieldBuilder().addBuilder( com.google.ads.googleads.v19.common.PolicyTopicEntry.getDefaultInstance()); } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v19.common.PolicyTopicEntry.Builder addPolicyTopicEntriesBuilder( int index) { return getPolicyTopicEntriesFieldBuilder().addBuilder( index, com.google.ads.googleads.v19.common.PolicyTopicEntry.getDefaultInstance()); } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). 
* </pre> * * <code>repeated .google.ads.googleads.v19.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public java.util.List<com.google.ads.googleads.v19.common.PolicyTopicEntry.Builder> getPolicyTopicEntriesBuilderList() { return getPolicyTopicEntriesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v19.common.PolicyTopicEntry, com.google.ads.googleads.v19.common.PolicyTopicEntry.Builder, com.google.ads.googleads.v19.common.PolicyTopicEntryOrBuilder> getPolicyTopicEntriesFieldBuilder() { if (policyTopicEntriesBuilder_ == null) { policyTopicEntriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v19.common.PolicyTopicEntry, com.google.ads.googleads.v19.common.PolicyTopicEntry.Builder, com.google.ads.googleads.v19.common.PolicyTopicEntryOrBuilder>( policyTopicEntries_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); policyTopicEntries_ = null; } return policyTopicEntriesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v19.errors.PolicyFindingDetails) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v19.errors.PolicyFindingDetails) private static final com.google.ads.googleads.v19.errors.PolicyFindingDetails DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v19.errors.PolicyFindingDetails(); } public static com.google.ads.googleads.v19.errors.PolicyFindingDetails getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PolicyFindingDetails> PARSER = new 
com.google.protobuf.AbstractParser<PolicyFindingDetails>() { @java.lang.Override public PolicyFindingDetails parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PolicyFindingDetails> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PolicyFindingDetails> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v19.errors.PolicyFindingDetails getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
36,156
google-ads-stubs-v20/src/main/java/com/google/ads/googleads/v20/errors/PolicyFindingDetails.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v20/errors/errors.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v20.errors; /** * <pre> * Error returned as part of a mutate response. * This error indicates one or more policy findings in the fields of a * resource. * </pre> * * Protobuf type {@code google.ads.googleads.v20.errors.PolicyFindingDetails} */ public final class PolicyFindingDetails extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v20.errors.PolicyFindingDetails) PolicyFindingDetailsOrBuilder { private static final long serialVersionUID = 0L; // Use PolicyFindingDetails.newBuilder() to construct. private PolicyFindingDetails(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PolicyFindingDetails() { policyTopicEntries_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new PolicyFindingDetails(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.errors.ErrorsProto.internal_static_google_ads_googleads_v20_errors_PolicyFindingDetails_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.errors.ErrorsProto.internal_static_google_ads_googleads_v20_errors_PolicyFindingDetails_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.errors.PolicyFindingDetails.class, com.google.ads.googleads.v20.errors.PolicyFindingDetails.Builder.class); } public static final int POLICY_TOPIC_ENTRIES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.ads.googleads.v20.common.PolicyTopicEntry> policyTopicEntries_; /** * <pre> * The list of policy 
topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public java.util.List<com.google.ads.googleads.v20.common.PolicyTopicEntry> getPolicyTopicEntriesList() { return policyTopicEntries_; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.ads.googleads.v20.common.PolicyTopicEntryOrBuilder> getPolicyTopicEntriesOrBuilderList() { return policyTopicEntries_; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public int getPolicyTopicEntriesCount() { return policyTopicEntries_.size(); } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.common.PolicyTopicEntry getPolicyTopicEntries(int index) { return policyTopicEntries_.get(index); } /** * <pre> * The list of policy topics for the resource. 
Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v20.common.PolicyTopicEntryOrBuilder getPolicyTopicEntriesOrBuilder( int index) { return policyTopicEntries_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < policyTopicEntries_.size(); i++) { output.writeMessage(1, policyTopicEntries_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < policyTopicEntries_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, policyTopicEntries_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v20.errors.PolicyFindingDetails)) { return super.equals(obj); } com.google.ads.googleads.v20.errors.PolicyFindingDetails other = (com.google.ads.googleads.v20.errors.PolicyFindingDetails) obj; if (!getPolicyTopicEntriesList() .equals(other.getPolicyTopicEntriesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (getPolicyTopicEntriesCount() > 0) { hash = (37 * hash) + POLICY_TOPIC_ENTRIES_FIELD_NUMBER; hash = (53 * hash) + getPolicyTopicEntriesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v20.errors.PolicyFindingDetails parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.errors.PolicyFindingDetails parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.errors.PolicyFindingDetails parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.errors.PolicyFindingDetails parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.errors.PolicyFindingDetails parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v20.errors.PolicyFindingDetails parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v20.errors.PolicyFindingDetails parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
com.google.ads.googleads.v20.errors.PolicyFindingDetails parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.errors.PolicyFindingDetails parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.errors.PolicyFindingDetails parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v20.errors.PolicyFindingDetails parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v20.errors.PolicyFindingDetails parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v20.errors.PolicyFindingDetails prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Error returned as part of a mutate response. * This error indicates one or more policy findings in the fields of a * resource. * </pre> * * Protobuf type {@code google.ads.googleads.v20.errors.PolicyFindingDetails} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v20.errors.PolicyFindingDetails) com.google.ads.googleads.v20.errors.PolicyFindingDetailsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v20.errors.ErrorsProto.internal_static_google_ads_googleads_v20_errors_PolicyFindingDetails_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v20.errors.ErrorsProto.internal_static_google_ads_googleads_v20_errors_PolicyFindingDetails_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v20.errors.PolicyFindingDetails.class, com.google.ads.googleads.v20.errors.PolicyFindingDetails.Builder.class); } // Construct using com.google.ads.googleads.v20.errors.PolicyFindingDetails.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (policyTopicEntriesBuilder_ == null) { policyTopicEntries_ = java.util.Collections.emptyList(); } else { policyTopicEntries_ = null; policyTopicEntriesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() 
{ return com.google.ads.googleads.v20.errors.ErrorsProto.internal_static_google_ads_googleads_v20_errors_PolicyFindingDetails_descriptor; } @java.lang.Override public com.google.ads.googleads.v20.errors.PolicyFindingDetails getDefaultInstanceForType() { return com.google.ads.googleads.v20.errors.PolicyFindingDetails.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v20.errors.PolicyFindingDetails build() { com.google.ads.googleads.v20.errors.PolicyFindingDetails result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v20.errors.PolicyFindingDetails buildPartial() { com.google.ads.googleads.v20.errors.PolicyFindingDetails result = new com.google.ads.googleads.v20.errors.PolicyFindingDetails(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.ads.googleads.v20.errors.PolicyFindingDetails result) { if (policyTopicEntriesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { policyTopicEntries_ = java.util.Collections.unmodifiableList(policyTopicEntries_); bitField0_ = (bitField0_ & ~0x00000001); } result.policyTopicEntries_ = policyTopicEntries_; } else { result.policyTopicEntries_ = policyTopicEntriesBuilder_.build(); } } private void buildPartial0(com.google.ads.googleads.v20.errors.PolicyFindingDetails result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( 
com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v20.errors.PolicyFindingDetails) { return mergeFrom((com.google.ads.googleads.v20.errors.PolicyFindingDetails)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v20.errors.PolicyFindingDetails other) { if (other == com.google.ads.googleads.v20.errors.PolicyFindingDetails.getDefaultInstance()) return this; if (policyTopicEntriesBuilder_ == null) { if (!other.policyTopicEntries_.isEmpty()) { if (policyTopicEntries_.isEmpty()) { policyTopicEntries_ = other.policyTopicEntries_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.addAll(other.policyTopicEntries_); } onChanged(); } } else { if (!other.policyTopicEntries_.isEmpty()) { if (policyTopicEntriesBuilder_.isEmpty()) { policyTopicEntriesBuilder_.dispose(); policyTopicEntriesBuilder_ = null; policyTopicEntries_ = other.policyTopicEntries_; bitField0_ = (bitField0_ & ~0x00000001); policyTopicEntriesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getPolicyTopicEntriesFieldBuilder() : null; } else { policyTopicEntriesBuilder_.addAllMessages(other.policyTopicEntries_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.ads.googleads.v20.common.PolicyTopicEntry m = input.readMessage( com.google.ads.googleads.v20.common.PolicyTopicEntry.parser(), extensionRegistry); if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(m); } else { policyTopicEntriesBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.ads.googleads.v20.common.PolicyTopicEntry> policyTopicEntries_ = java.util.Collections.emptyList(); private void ensurePolicyTopicEntriesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { policyTopicEntries_ = new java.util.ArrayList<com.google.ads.googleads.v20.common.PolicyTopicEntry>(policyTopicEntries_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v20.common.PolicyTopicEntry, com.google.ads.googleads.v20.common.PolicyTopicEntry.Builder, com.google.ads.googleads.v20.common.PolicyTopicEntryOrBuilder> policyTopicEntriesBuilder_; /** * <pre> * 
The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public java.util.List<com.google.ads.googleads.v20.common.PolicyTopicEntry> getPolicyTopicEntriesList() { if (policyTopicEntriesBuilder_ == null) { return java.util.Collections.unmodifiableList(policyTopicEntries_); } else { return policyTopicEntriesBuilder_.getMessageList(); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public int getPolicyTopicEntriesCount() { if (policyTopicEntriesBuilder_ == null) { return policyTopicEntries_.size(); } else { return policyTopicEntriesBuilder_.getCount(); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v20.common.PolicyTopicEntry getPolicyTopicEntries(int index) { if (policyTopicEntriesBuilder_ == null) { return policyTopicEntries_.get(index); } else { return policyTopicEntriesBuilder_.getMessage(index); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). 
* </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder setPolicyTopicEntries( int index, com.google.ads.googleads.v20.common.PolicyTopicEntry value) { if (policyTopicEntriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.set(index, value); onChanged(); } else { policyTopicEntriesBuilder_.setMessage(index, value); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder setPolicyTopicEntries( int index, com.google.ads.googleads.v20.common.PolicyTopicEntry.Builder builderForValue) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.set(index, builderForValue.build()); onChanged(); } else { policyTopicEntriesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addPolicyTopicEntries(com.google.ads.googleads.v20.common.PolicyTopicEntry value) { if (policyTopicEntriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(value); onChanged(); } else { policyTopicEntriesBuilder_.addMessage(value); } return this; } /** * <pre> * The list of policy topics for the resource. 
Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addPolicyTopicEntries( int index, com.google.ads.googleads.v20.common.PolicyTopicEntry value) { if (policyTopicEntriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(index, value); onChanged(); } else { policyTopicEntriesBuilder_.addMessage(index, value); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addPolicyTopicEntries( com.google.ads.googleads.v20.common.PolicyTopicEntry.Builder builderForValue) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(builderForValue.build()); onChanged(); } else { policyTopicEntriesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). 
* </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addPolicyTopicEntries( int index, com.google.ads.googleads.v20.common.PolicyTopicEntry.Builder builderForValue) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(index, builderForValue.build()); onChanged(); } else { policyTopicEntriesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addAllPolicyTopicEntries( java.lang.Iterable<? extends com.google.ads.googleads.v20.common.PolicyTopicEntry> values) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, policyTopicEntries_); onChanged(); } else { policyTopicEntriesBuilder_.addAllMessages(values); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder clearPolicyTopicEntries() { if (policyTopicEntriesBuilder_ == null) { policyTopicEntries_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { policyTopicEntriesBuilder_.clear(); } return this; } /** * <pre> * The list of policy topics for the resource. 
Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder removePolicyTopicEntries(int index) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.remove(index); onChanged(); } else { policyTopicEntriesBuilder_.remove(index); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v20.common.PolicyTopicEntry.Builder getPolicyTopicEntriesBuilder( int index) { return getPolicyTopicEntriesFieldBuilder().getBuilder(index); } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v20.common.PolicyTopicEntryOrBuilder getPolicyTopicEntriesOrBuilder( int index) { if (policyTopicEntriesBuilder_ == null) { return policyTopicEntries_.get(index); } else { return policyTopicEntriesBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). 
* </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public java.util.List<? extends com.google.ads.googleads.v20.common.PolicyTopicEntryOrBuilder> getPolicyTopicEntriesOrBuilderList() { if (policyTopicEntriesBuilder_ != null) { return policyTopicEntriesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(policyTopicEntries_); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v20.common.PolicyTopicEntry.Builder addPolicyTopicEntriesBuilder() { return getPolicyTopicEntriesFieldBuilder().addBuilder( com.google.ads.googleads.v20.common.PolicyTopicEntry.getDefaultInstance()); } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v20.common.PolicyTopicEntry.Builder addPolicyTopicEntriesBuilder( int index) { return getPolicyTopicEntriesFieldBuilder().addBuilder( index, com.google.ads.googleads.v20.common.PolicyTopicEntry.getDefaultInstance()); } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). 
* </pre> * * <code>repeated .google.ads.googleads.v20.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public java.util.List<com.google.ads.googleads.v20.common.PolicyTopicEntry.Builder> getPolicyTopicEntriesBuilderList() { return getPolicyTopicEntriesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v20.common.PolicyTopicEntry, com.google.ads.googleads.v20.common.PolicyTopicEntry.Builder, com.google.ads.googleads.v20.common.PolicyTopicEntryOrBuilder> getPolicyTopicEntriesFieldBuilder() { if (policyTopicEntriesBuilder_ == null) { policyTopicEntriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v20.common.PolicyTopicEntry, com.google.ads.googleads.v20.common.PolicyTopicEntry.Builder, com.google.ads.googleads.v20.common.PolicyTopicEntryOrBuilder>( policyTopicEntries_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); policyTopicEntries_ = null; } return policyTopicEntriesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v20.errors.PolicyFindingDetails) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v20.errors.PolicyFindingDetails) private static final com.google.ads.googleads.v20.errors.PolicyFindingDetails DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v20.errors.PolicyFindingDetails(); } public static com.google.ads.googleads.v20.errors.PolicyFindingDetails getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PolicyFindingDetails> PARSER = new 
com.google.protobuf.AbstractParser<PolicyFindingDetails>() { @java.lang.Override public PolicyFindingDetails parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PolicyFindingDetails> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PolicyFindingDetails> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v20.errors.PolicyFindingDetails getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleads/google-ads-java
36,156
google-ads-stubs-v21/src/main/java/com/google/ads/googleads/v21/errors/PolicyFindingDetails.java
// Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/ads/googleads/v21/errors/errors.proto // Protobuf Java Version: 3.25.7 package com.google.ads.googleads.v21.errors; /** * <pre> * Error returned as part of a mutate response. * This error indicates one or more policy findings in the fields of a * resource. * </pre> * * Protobuf type {@code google.ads.googleads.v21.errors.PolicyFindingDetails} */ public final class PolicyFindingDetails extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.ads.googleads.v21.errors.PolicyFindingDetails) PolicyFindingDetailsOrBuilder { private static final long serialVersionUID = 0L; // Use PolicyFindingDetails.newBuilder() to construct. private PolicyFindingDetails(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PolicyFindingDetails() { policyTopicEntries_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance( UnusedPrivateParameter unused) { return new PolicyFindingDetails(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.errors.ErrorsProto.internal_static_google_ads_googleads_v21_errors_PolicyFindingDetails_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.errors.ErrorsProto.internal_static_google_ads_googleads_v21_errors_PolicyFindingDetails_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.errors.PolicyFindingDetails.class, com.google.ads.googleads.v21.errors.PolicyFindingDetails.Builder.class); } public static final int POLICY_TOPIC_ENTRIES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.ads.googleads.v21.common.PolicyTopicEntry> policyTopicEntries_; /** * <pre> * The list of policy 
topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public java.util.List<com.google.ads.googleads.v21.common.PolicyTopicEntry> getPolicyTopicEntriesList() { return policyTopicEntries_; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.ads.googleads.v21.common.PolicyTopicEntryOrBuilder> getPolicyTopicEntriesOrBuilderList() { return policyTopicEntries_; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public int getPolicyTopicEntriesCount() { return policyTopicEntries_.size(); } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v21.common.PolicyTopicEntry getPolicyTopicEntries(int index) { return policyTopicEntries_.get(index); } /** * <pre> * The list of policy topics for the resource. 
Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ @java.lang.Override public com.google.ads.googleads.v21.common.PolicyTopicEntryOrBuilder getPolicyTopicEntriesOrBuilder( int index) { return policyTopicEntries_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < policyTopicEntries_.size(); i++) { output.writeMessage(1, policyTopicEntries_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < policyTopicEntries_.size(); i++) { size += com.google.protobuf.CodedOutputStream .computeMessageSize(1, policyTopicEntries_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.ads.googleads.v21.errors.PolicyFindingDetails)) { return super.equals(obj); } com.google.ads.googleads.v21.errors.PolicyFindingDetails other = (com.google.ads.googleads.v21.errors.PolicyFindingDetails) obj; if (!getPolicyTopicEntriesList() .equals(other.getPolicyTopicEntriesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (getPolicyTopicEntriesCount() > 0) { hash = (37 * hash) + POLICY_TOPIC_ENTRIES_FIELD_NUMBER; hash = (53 * hash) + getPolicyTopicEntriesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.ads.googleads.v21.errors.PolicyFindingDetails parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.PolicyFindingDetails parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.PolicyFindingDetails parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.PolicyFindingDetails parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.PolicyFindingDetails parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.ads.googleads.v21.errors.PolicyFindingDetails parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.ads.googleads.v21.errors.PolicyFindingDetails parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static 
com.google.ads.googleads.v21.errors.PolicyFindingDetails parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.errors.PolicyFindingDetails parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.PolicyFindingDetails parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseDelimitedWithIOException(PARSER, input, extensionRegistry); } public static com.google.ads.googleads.v21.errors.PolicyFindingDetails parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input); } public static com.google.ads.googleads.v21.errors.PolicyFindingDetails parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3 .parseWithIOException(PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.ads.googleads.v21.errors.PolicyFindingDetails prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * <pre> * Error returned as part of a mutate response. * This error indicates one or more policy findings in the fields of a * resource. * </pre> * * Protobuf type {@code google.ads.googleads.v21.errors.PolicyFindingDetails} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.ads.googleads.v21.errors.PolicyFindingDetails) com.google.ads.googleads.v21.errors.PolicyFindingDetailsOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.ads.googleads.v21.errors.ErrorsProto.internal_static_google_ads_googleads_v21_errors_PolicyFindingDetails_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.ads.googleads.v21.errors.ErrorsProto.internal_static_google_ads_googleads_v21_errors_PolicyFindingDetails_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.ads.googleads.v21.errors.PolicyFindingDetails.class, com.google.ads.googleads.v21.errors.PolicyFindingDetails.Builder.class); } // Construct using com.google.ads.googleads.v21.errors.PolicyFindingDetails.newBuilder() private Builder() { } private Builder( com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (policyTopicEntriesBuilder_ == null) { policyTopicEntries_ = java.util.Collections.emptyList(); } else { policyTopicEntries_ = null; policyTopicEntriesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() 
{ return com.google.ads.googleads.v21.errors.ErrorsProto.internal_static_google_ads_googleads_v21_errors_PolicyFindingDetails_descriptor; } @java.lang.Override public com.google.ads.googleads.v21.errors.PolicyFindingDetails getDefaultInstanceForType() { return com.google.ads.googleads.v21.errors.PolicyFindingDetails.getDefaultInstance(); } @java.lang.Override public com.google.ads.googleads.v21.errors.PolicyFindingDetails build() { com.google.ads.googleads.v21.errors.PolicyFindingDetails result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.ads.googleads.v21.errors.PolicyFindingDetails buildPartial() { com.google.ads.googleads.v21.errors.PolicyFindingDetails result = new com.google.ads.googleads.v21.errors.PolicyFindingDetails(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields(com.google.ads.googleads.v21.errors.PolicyFindingDetails result) { if (policyTopicEntriesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { policyTopicEntries_ = java.util.Collections.unmodifiableList(policyTopicEntries_); bitField0_ = (bitField0_ & ~0x00000001); } result.policyTopicEntries_ = policyTopicEntries_; } else { result.policyTopicEntries_ = policyTopicEntriesBuilder_.build(); } } private void buildPartial0(com.google.ads.googleads.v21.errors.PolicyFindingDetails result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField( com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof( 
com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.ads.googleads.v21.errors.PolicyFindingDetails) { return mergeFrom((com.google.ads.googleads.v21.errors.PolicyFindingDetails)other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.ads.googleads.v21.errors.PolicyFindingDetails other) { if (other == com.google.ads.googleads.v21.errors.PolicyFindingDetails.getDefaultInstance()) return this; if (policyTopicEntriesBuilder_ == null) { if (!other.policyTopicEntries_.isEmpty()) { if (policyTopicEntries_.isEmpty()) { policyTopicEntries_ = other.policyTopicEntries_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.addAll(other.policyTopicEntries_); } onChanged(); } } else { if (!other.policyTopicEntries_.isEmpty()) { if (policyTopicEntriesBuilder_.isEmpty()) { policyTopicEntriesBuilder_.dispose(); policyTopicEntriesBuilder_ = null; policyTopicEntries_ = other.policyTopicEntries_; bitField0_ = (bitField0_ & ~0x00000001); policyTopicEntriesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getPolicyTopicEntriesFieldBuilder() : null; } else { policyTopicEntriesBuilder_.addAllMessages(other.policyTopicEntries_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.ads.googleads.v21.common.PolicyTopicEntry m = input.readMessage( com.google.ads.googleads.v21.common.PolicyTopicEntry.parser(), extensionRegistry); if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(m); } else { policyTopicEntriesBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.ads.googleads.v21.common.PolicyTopicEntry> policyTopicEntries_ = java.util.Collections.emptyList(); private void ensurePolicyTopicEntriesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { policyTopicEntries_ = new java.util.ArrayList<com.google.ads.googleads.v21.common.PolicyTopicEntry>(policyTopicEntries_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v21.common.PolicyTopicEntry, com.google.ads.googleads.v21.common.PolicyTopicEntry.Builder, com.google.ads.googleads.v21.common.PolicyTopicEntryOrBuilder> policyTopicEntriesBuilder_; /** * <pre> * 
The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public java.util.List<com.google.ads.googleads.v21.common.PolicyTopicEntry> getPolicyTopicEntriesList() { if (policyTopicEntriesBuilder_ == null) { return java.util.Collections.unmodifiableList(policyTopicEntries_); } else { return policyTopicEntriesBuilder_.getMessageList(); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public int getPolicyTopicEntriesCount() { if (policyTopicEntriesBuilder_ == null) { return policyTopicEntries_.size(); } else { return policyTopicEntriesBuilder_.getCount(); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v21.common.PolicyTopicEntry getPolicyTopicEntries(int index) { if (policyTopicEntriesBuilder_ == null) { return policyTopicEntries_.get(index); } else { return policyTopicEntriesBuilder_.getMessage(index); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). 
* </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder setPolicyTopicEntries( int index, com.google.ads.googleads.v21.common.PolicyTopicEntry value) { if (policyTopicEntriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.set(index, value); onChanged(); } else { policyTopicEntriesBuilder_.setMessage(index, value); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder setPolicyTopicEntries( int index, com.google.ads.googleads.v21.common.PolicyTopicEntry.Builder builderForValue) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.set(index, builderForValue.build()); onChanged(); } else { policyTopicEntriesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addPolicyTopicEntries(com.google.ads.googleads.v21.common.PolicyTopicEntry value) { if (policyTopicEntriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(value); onChanged(); } else { policyTopicEntriesBuilder_.addMessage(value); } return this; } /** * <pre> * The list of policy topics for the resource. 
Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addPolicyTopicEntries( int index, com.google.ads.googleads.v21.common.PolicyTopicEntry value) { if (policyTopicEntriesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(index, value); onChanged(); } else { policyTopicEntriesBuilder_.addMessage(index, value); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addPolicyTopicEntries( com.google.ads.googleads.v21.common.PolicyTopicEntry.Builder builderForValue) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(builderForValue.build()); onChanged(); } else { policyTopicEntriesBuilder_.addMessage(builderForValue.build()); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). 
* </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addPolicyTopicEntries( int index, com.google.ads.googleads.v21.common.PolicyTopicEntry.Builder builderForValue) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.add(index, builderForValue.build()); onChanged(); } else { policyTopicEntriesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder addAllPolicyTopicEntries( java.lang.Iterable<? extends com.google.ads.googleads.v21.common.PolicyTopicEntry> values) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll( values, policyTopicEntries_); onChanged(); } else { policyTopicEntriesBuilder_.addAllMessages(values); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder clearPolicyTopicEntries() { if (policyTopicEntriesBuilder_ == null) { policyTopicEntries_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { policyTopicEntriesBuilder_.clear(); } return this; } /** * <pre> * The list of policy topics for the resource. 
Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public Builder removePolicyTopicEntries(int index) { if (policyTopicEntriesBuilder_ == null) { ensurePolicyTopicEntriesIsMutable(); policyTopicEntries_.remove(index); onChanged(); } else { policyTopicEntriesBuilder_.remove(index); } return this; } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v21.common.PolicyTopicEntry.Builder getPolicyTopicEntriesBuilder( int index) { return getPolicyTopicEntriesFieldBuilder().getBuilder(index); } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v21.common.PolicyTopicEntryOrBuilder getPolicyTopicEntriesOrBuilder( int index) { if (policyTopicEntriesBuilder_ == null) { return policyTopicEntries_.get(index); } else { return policyTopicEntriesBuilder_.getMessageOrBuilder(index); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). 
* </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public java.util.List<? extends com.google.ads.googleads.v21.common.PolicyTopicEntryOrBuilder> getPolicyTopicEntriesOrBuilderList() { if (policyTopicEntriesBuilder_ != null) { return policyTopicEntriesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(policyTopicEntries_); } } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v21.common.PolicyTopicEntry.Builder addPolicyTopicEntriesBuilder() { return getPolicyTopicEntriesFieldBuilder().addBuilder( com.google.ads.googleads.v21.common.PolicyTopicEntry.getDefaultInstance()); } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). * </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public com.google.ads.googleads.v21.common.PolicyTopicEntry.Builder addPolicyTopicEntriesBuilder( int index) { return getPolicyTopicEntriesFieldBuilder().addBuilder( index, com.google.ads.googleads.v21.common.PolicyTopicEntry.getDefaultInstance()); } /** * <pre> * The list of policy topics for the resource. Contains the PROHIBITED or * FULLY_LIMITED policy topic entries that prevented the resource from being * saved (among any other entries the resource may also have). 
* </pre> * * <code>repeated .google.ads.googleads.v21.common.PolicyTopicEntry policy_topic_entries = 1;</code> */ public java.util.List<com.google.ads.googleads.v21.common.PolicyTopicEntry.Builder> getPolicyTopicEntriesBuilderList() { return getPolicyTopicEntriesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v21.common.PolicyTopicEntry, com.google.ads.googleads.v21.common.PolicyTopicEntry.Builder, com.google.ads.googleads.v21.common.PolicyTopicEntryOrBuilder> getPolicyTopicEntriesFieldBuilder() { if (policyTopicEntriesBuilder_ == null) { policyTopicEntriesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.ads.googleads.v21.common.PolicyTopicEntry, com.google.ads.googleads.v21.common.PolicyTopicEntry.Builder, com.google.ads.googleads.v21.common.PolicyTopicEntryOrBuilder>( policyTopicEntries_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); policyTopicEntries_ = null; } return policyTopicEntriesBuilder_; } @java.lang.Override public final Builder setUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.ads.googleads.v21.errors.PolicyFindingDetails) } // @@protoc_insertion_point(class_scope:google.ads.googleads.v21.errors.PolicyFindingDetails) private static final com.google.ads.googleads.v21.errors.PolicyFindingDetails DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.ads.googleads.v21.errors.PolicyFindingDetails(); } public static com.google.ads.googleads.v21.errors.PolicyFindingDetails getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PolicyFindingDetails> PARSER = new 
com.google.protobuf.AbstractParser<PolicyFindingDetails>() { @java.lang.Override public PolicyFindingDetails parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PolicyFindingDetails> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PolicyFindingDetails> getParserForType() { return PARSER; } @java.lang.Override public com.google.ads.googleads.v21.errors.PolicyFindingDetails getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hadoop
35,951
hadoop-yarn-project/hadoop-yarn/hadoop-yarn-server/hadoop-yarn-server-nodemanager/src/main/java/org/apache/hadoop/yarn/server/nodemanager/containermanager/linux/runtime/RuncContainerRuntime.java
/* * * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. * / */ package org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime; import org.apache.hadoop.classification.VisibleForTesting; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.classification.InterfaceStability; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hdfs.protocol.datatransfer.IOStreamPair; import org.apache.hadoop.security.authorize.AccessControlList; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.concurrent.HadoopExecutors; import org.apache.hadoop.yarn.api.records.ApplicationId; import org.apache.hadoop.yarn.api.records.ContainerId; import org.apache.hadoop.yarn.api.records.LocalResource; import org.apache.hadoop.yarn.conf.YarnConfiguration; import org.apache.hadoop.yarn.exceptions.YarnException; import org.apache.hadoop.yarn.server.nodemanager.ContainerExecutor; import org.apache.hadoop.yarn.server.nodemanager.Context; import org.apache.hadoop.yarn.server.nodemanager.containermanager.container.Container; import org.apache.hadoop.yarn.server.nodemanager.containermanager.launcher.ContainerLaunch; import 
org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperation; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperationException; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.privileged.PrivilegedOperationExecutor; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.CGroupsHandler; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.resources.ResourceHandlerModule; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.ImageManifest; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.RuncContainerExecutorConfig; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.RuncContainerExecutorConfig.OCILayer; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.RuncContainerExecutorConfig.OCIRuntimeConfig; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.RuncContainerExecutorConfig.OCIRuntimeConfig.OCILinuxConfig; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.RuncContainerExecutorConfig.OCIRuntimeConfig.OCIMount; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.RuncContainerExecutorConfig.OCIRuntimeConfig.OCIProcessConfig; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.RuncImageTagToManifestPlugin; import org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.runc.RuncManifestToResourcesPlugin; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalResourceRequest; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.LocalizedResource; import org.apache.hadoop.yarn.server.nodemanager.containermanager.localizer.ResourceLocalizationService; import 
org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerExecutionException; import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeConstants; import org.apache.hadoop.yarn.server.nodemanager.containermanager.runtime.ContainerRuntimeContext; import org.apache.hadoop.yarn.server.nodemanager.containermanager.volume.csi.ContainerVolumePublisher; import org.apache.hadoop.yarn.server.nodemanager.executor.ContainerExecContext; import java.io.File; import java.io.IOException; import java.net.URISyntaxException; import java.nio.charset.StandardCharsets; import java.nio.file.Files; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.regex.Matcher; import com.fasterxml.jackson.databind.JsonNode; import com.fasterxml.jackson.databind.ObjectMapper; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_RUNC_IMAGE_TAG_TO_MANIFEST_PLUGIN; import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_RUNC_LAYER_MOUNTS_TO_KEEP; import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL; import static org.apache.hadoop.yarn.conf.YarnConfiguration.DEFAULT_NM_RUNC_MANIFEST_TO_RESOURCES_PLUGIN; import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_RUNC_IMAGE_TAG_TO_MANIFEST_PLUGIN; import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_RUNC_LAYER_MOUNTS_TO_KEEP; import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_RUNC_MANIFEST_TO_RESOURCES_PLUGIN; import static org.apache.hadoop.yarn.conf.YarnConfiguration.NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL; import static 
org.apache.hadoop.yarn.server.nodemanager.containermanager.linux.runtime.LinuxContainerRuntimeConstants.*;

/**
 * <p>This class is an extension of {@link OCIContainerRuntime} that uses the
 * native {@code container-executor} binary via a
 * {@link PrivilegedOperationExecutor} instance to launch processes inside
 * Runc containers.</p>
 *
 * <p>The following environment variables are used to configure the Runc
 * engine:</p>
 *
 * <ul>
 *   <li>
 *     {@code YARN_CONTAINER_RUNTIME_TYPE} ultimately determines whether a
 *     runC container will be used. If the value is {@code runc}, a runC
 *     container will be used. Otherwise a regular process tree container will
 *     be used. This environment variable is checked by the
 *     {@link #isRuncContainerRequested} method, which is called by the
 *     {@link DelegatingLinuxContainerRuntime}.
 *   </li>
 *   <li>
 *     {@code YARN_CONTAINER_RUNTIME_RUNC_IMAGE} names which image
 *     will be used to launch the Runc container.
 *   </li>
 *   <li>
 *     {@code YARN_CONTAINER_RUNTIME_RUNC_MOUNTS} allows users to specify
 *     additional volume mounts for the runC container. The value of the
 *     environment variable should be a comma-separated list of mounts.
 *     All such mounts must be given as {@code source:dest[:mode]} and the mode
 *     must be "ro" (read-only) or "rw" (read-write) to specify the type of
 *     access being requested. If neither is specified, read-write will be
 *     assumed. The requested mounts will be validated by
 *     container-executor based on the values set in container-executor.cfg for
 *     {@code runc.allowed.ro-mounts} and {@code runc.allowed.rw-mounts}.
 *   </li>
 *   <li>
 *     {@code YARN_CONTAINER_RUNTIME_RUNC_CONTAINER_HOSTNAME} sets the
 *     hostname to be used by the Runc container. If not specified, a
 *     hostname will be derived from the container ID and set as default
 *     hostname for networks other than 'host'.
 *   </li>
 * </ul>
 */
@InterfaceAudience.Private
@InterfaceStability.Unstable
public class RuncContainerRuntime extends OCIContainerRuntime {

  private static final Logger LOG =
      LoggerFactory.getLogger(RuncContainerRuntime.class);

  // Discriminator used to build the runC-specific OCI env-var names below.
  @InterfaceAudience.Private
  private static final String RUNTIME_TYPE = "RUNC";

  @InterfaceAudience.Private
  public static final String ENV_RUNC_CONTAINER_IMAGE =
      "YARN_CONTAINER_RUNTIME_RUNC_IMAGE";
  @InterfaceAudience.Private
  public static final String ENV_RUNC_CONTAINER_MOUNTS =
      "YARN_CONTAINER_RUNTIME_RUNC_MOUNTS";
  @InterfaceAudience.Private
  public static final String ENV_RUNC_CONTAINER_HOSTNAME =
      "YARN_CONTAINER_RUNTIME_RUNC_CONTAINER_HOSTNAME";
  @InterfaceAudience.Private
  public final static String ENV_RUNC_CONTAINER_PID_NAMESPACE =
      formatOciEnvKey(RUNTIME_TYPE, CONTAINER_PID_NAMESPACE_SUFFIX);
  @InterfaceAudience.Private
  public final static String ENV_RUNC_CONTAINER_RUN_PRIVILEGED_CONTAINER =
      formatOciEnvKey(RUNTIME_TYPE, RUN_PRIVILEGED_CONTAINER_SUFFIX);

  private Configuration conf;
  private Context nmContext;
  private PrivilegedOperationExecutor privilegedOperationExecutor;
  // May stay null when cgroups are not in use; guarded in getCgroupPath().
  private CGroupsHandler cGroupsHandler;
  // Pluggable translators: image tag -> manifest -> localizable resources.
  private RuncImageTagToManifestPlugin imageTagToManifestPlugin;
  private RuncManifestToResourcesPlugin manifestToResourcesPlugin;
  private ObjectMapper mapper;
  // Contents of the seccomp profile file, read in initialize().
  private String seccomp;
  private int layersToKeep;
  private String defaultRuncImage;
  // Drives the periodic reap-layer-mounts task started in start().
  private ScheduledExecutorService exec;
  private String seccompProfile;
  private Set<String> defaultROMounts = new HashSet<>();
  private Set<String> defaultRWMounts = new HashSet<>();
  private Set<String> allowedNetworks = new HashSet<>();
  private Set<String> allowedRuntimes = new HashSet<>();
  private AccessControlList privilegedContainersAcl;

  /**
   * Creates a runtime that uses the cgroups handler supplied by
   * {@link ResourceHandlerModule}.
   *
   * @param privilegedOperationExecutor executor for container-executor calls
   */
  public RuncContainerRuntime(PrivilegedOperationExecutor
      privilegedOperationExecutor) {
    this(privilegedOperationExecutor, ResourceHandlerModule
        .getCGroupsHandler());
  }

  //A constructor with an injected cGroupsHandler primarily used for testing.
  @VisibleForTesting
  public RuncContainerRuntime(PrivilegedOperationExecutor
      privilegedOperationExecutor, CGroupsHandler cGroupsHandler) {
    super(privilegedOperationExecutor, cGroupsHandler);
    this.privilegedOperationExecutor = privilegedOperationExecutor;
    if (cGroupsHandler == null) {
      LOG.info("cGroupsHandler is null - cgroups not in use.");
    } else {
      this.cGroupsHandler = cGroupsHandler;
    }
  }

  /**
   * Reads all runC-related configuration (default image, allowed
   * networks/runtimes, privileged-container ACL, seccomp profile, default
   * ro/rw mounts, layer retention) and initializes the manifest plugins.
   *
   * @param configuration the NodeManager configuration
   * @param nmCtx the NodeManager context
   * @throws ContainerExecutionException if the seccomp profile file cannot
   *         be read
   */
  @Override
  public void initialize(Configuration configuration, Context nmCtx)
      throws ContainerExecutionException {
    super.initialize(configuration, nmCtx);
    this.conf = configuration;
    this.nmContext = nmCtx;

    imageTagToManifestPlugin = chooseImageTagToManifestPlugin();
    imageTagToManifestPlugin.init(conf);
    manifestToResourcesPlugin = chooseManifestToResourcesPlugin();
    manifestToResourcesPlugin.init(conf);

    mapper = new ObjectMapper();
    defaultRuncImage = conf.get(YarnConfiguration.NM_RUNC_IMAGE_NAME);

    // Clear before re-populating so repeated initialize() calls do not
    // accumulate stale entries.
    allowedNetworks.clear();
    allowedRuntimes.clear();

    allowedNetworks.addAll(Arrays.asList(
        conf.getTrimmedStrings(
        YarnConfiguration.NM_RUNC_ALLOWED_CONTAINER_NETWORKS,
        YarnConfiguration.DEFAULT_NM_RUNC_ALLOWED_CONTAINER_NETWORKS)));
    allowedRuntimes.addAll(Arrays.asList(
        conf.getTrimmedStrings(
        YarnConfiguration.NM_RUNC_ALLOWED_CONTAINER_RUNTIMES,
        YarnConfiguration.DEFAULT_NM_RUNC_ALLOWED_CONTAINER_RUNTIMES)));
    privilegedContainersAcl = new AccessControlList(conf.getTrimmed(
        YarnConfiguration.NM_RUNC_PRIVILEGED_CONTAINERS_ACL,
        YarnConfiguration.DEFAULT_NM_RUNC_PRIVILEGED_CONTAINERS_ACL));
    seccompProfile = conf.get(YarnConfiguration.NM_RUNC_SECCOMP_PROFILE);

    defaultROMounts.addAll(Arrays.asList(
        conf.getTrimmedStrings(
        YarnConfiguration.NM_RUNC_DEFAULT_RO_MOUNTS)));
    defaultRWMounts.addAll(Arrays.asList(
        conf.getTrimmedStrings(
        YarnConfiguration.NM_RUNC_DEFAULT_RW_MOUNTS)));

    try {
      //TODO Remove whitespace in seccomp that gets output to config.json
      if (seccompProfile != null) {
        seccomp = new String(Files.readAllBytes(Paths.get(seccompProfile)),
            StandardCharsets.UTF_8);
      }
    } catch (IOException ioe) {
      throw new ContainerExecutionException(ioe);
    }

    layersToKeep = conf.getInt(NM_RUNC_LAYER_MOUNTS_TO_KEEP,
        DEFAULT_NM_RUNC_LAYER_MOUNTS_TO_KEEP);
  }

  /**
   * Starts the manifest plugins and a single-threaded scheduled task that
   * periodically asks container-executor (via a
   * {@code REAP_RUNC_LAYER_MOUNTS} privileged operation) to clean up old
   * layer mounts, keeping at most {@code layersToKeep}.
   */
  @Override
  public void start() {
    int reapRuncLayerMountsInterval =
        conf.getInt(NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL,
        DEFAULT_NM_REAP_RUNC_LAYER_MOUNTS_INTERVAL);
    exec = HadoopExecutors.newScheduledThreadPool(1);
    exec.scheduleAtFixedRate(
        new Runnable() {
          @Override
          public void run() {
            // Catch everything: an escaping exception would cancel the
            // recurring task permanently.
            try {
              PrivilegedOperation launchOp = new PrivilegedOperation(
                  PrivilegedOperation.OperationType.REAP_RUNC_LAYER_MOUNTS);
              launchOp.appendArgs(Integer.toString(layersToKeep));
              try {
                String stdout = privilegedOperationExecutor
                    .executePrivilegedOperation(null, launchOp, null,
                    null, false, false);
                if(stdout != null) {
                  LOG.info("Reap layer mounts thread: " + stdout);
                }
              } catch (PrivilegedOperationException e) {
                // Best-effort cleanup; failure here does not affect running
                // containers, so only warn.
                LOG.warn("Failed to reap old runc layer mounts", e);
              }
            } catch (Exception e) {
              LOG.warn("Reap layer mount thread caught an exception: ", e);
            }
          }
        }, 0, reapRuncLayerMountsInterval, TimeUnit.SECONDS);

    imageTagToManifestPlugin.start();
    manifestToResourcesPlugin.start();
  }

  /** Stops the reap task and both manifest plugins. */
  @Override
  public void stop() {
    exec.shutdownNow();
    imageTagToManifestPlugin.stop();
    manifestToResourcesPlugin.stop();
  }

  /**
   * Launches a runC container: resolves the localized image config and layer
   * resources, extracts the image's Env and Entrypoint, assembles the OCI
   * mounts/process/linux configs, serializes the whole
   * {@link RuncContainerExecutorConfig} to a per-container JSON command file,
   * and hands it to container-executor via a {@code RUN_RUNC_CONTAINER}
   * privileged operation.
   *
   * @param ctx context holding the container and its execution attributes
   * @throws ContainerExecutionException if localization lookups, config
   *         parsing, command-file writing, or the privileged launch fail
   */
  @Override
  public void launchContainer(ContainerRuntimeContext ctx)
      throws ContainerExecutionException {
    List<String> env = new ArrayList<>();
    Container container = ctx.getContainer();
    String runAsUser = ctx.getExecutionAttribute(RUN_AS_USER);
    String user = ctx.getExecutionAttribute(USER);
    ContainerId containerId = container.getContainerId();
    ApplicationId appId = containerId.getApplicationAttemptId()
        .getApplicationId();
    Map<String, String> environment = container.getLaunchContext()
        .getEnvironment();

    ArrayList<OCIMount> mounts = new ArrayList<>();
    ArrayList<OCILayer> layers = new ArrayList<>();

    String hostname = environment.get(ENV_RUNC_CONTAINER_HOSTNAME);
    validateHostname(hostname);

    String containerIdStr = containerId.toString();
    String applicationId = appId.toString();
    Path containerWorkDir = ctx.getExecutionAttribute(CONTAINER_WORK_DIR);

    // Image config + layers were attached to the container earlier by
    // getLocalResources().
    RuncRuntimeObject runcRuntimeObject =
        container.getContainerRuntimeData(RuncRuntimeObject.class);
    List<LocalResource> layerResources = runcRuntimeObject.getOCILayers();
    ResourceLocalizationService localizationService =
        nmContext.getContainerManager().getResourceLocalizationService();
    List<String> args = new ArrayList<>();

    try {
      try {
        // Read the localized image config to pick up the image's default
        // environment and entrypoint.
        LocalResource rsrc = runcRuntimeObject.getConfig();
        LocalResourceRequest req = new LocalResourceRequest(rsrc);
        LocalizedResource localRsrc = localizationService
            .getLocalizedResource(req, user, appId);
        if (localRsrc == null) {
          throw new ContainerExecutionException("Could not successfully " +
              "localize layers. rsrc: " + rsrc.getResource().getFile());
        }
        File file = new File(localRsrc.getLocalPath().toString());
        List<String> imageEnv = extractImageEnv(file);
        if (imageEnv != null && !imageEnv.isEmpty()) {
          env.addAll(imageEnv);
        }
        List<String> entrypoint = extractImageEntrypoint(file);
        if (entrypoint != null && !entrypoint.isEmpty()) {
          args.addAll(entrypoint);
        }
      } catch (IOException ioe) {
        throw new ContainerExecutionException(ioe);
      }

      // Map every localized layer to a squashfs OCILayer entry.
      for (LocalResource rsrc : layerResources) {
        LocalResourceRequest req = new LocalResourceRequest(rsrc);
        LocalizedResource localRsrc = localizationService
            .getLocalizedResource(req, user, appId);
        OCILayer layer = new OCILayer("application/vnd.squashfs",
            localRsrc.getLocalPath().toString());
        layers.add(layer);
      }
    } catch (URISyntaxException e) {
      throw new ContainerExecutionException(e);
    }

    setContainerMounts(mounts, ctx, containerWorkDir, environment);

    String resourcesOpts = ctx.getExecutionAttribute(RESOURCES_OPTIONS);
    Path nmPrivateContainerScriptPath = ctx.getExecutionAttribute(
        NM_PRIVATE_CONTAINER_SCRIPT_PATH);
    Path nmPrivateTokensPath =
        ctx.getExecutionAttribute(NM_PRIVATE_TOKENS_PATH);

    int cpuShares = container.getResource().getVirtualCores();
    // Zero sets to default of 1024. 2 is the minimum value otherwise
    if (cpuShares < 2) {
      cpuShares = 2;
    }

    Path launchDst =
        new Path(containerWorkDir, ContainerLaunch.CONTAINER_SCRIPT);
    // The container entrypoint (if any) runs first; the launch script is
    // appended after it.
    args.add("bash");
    args.add(launchDst.toUri().getPath());

    String cgroupPath = getCgroupPath(resourcesOpts, "runc-" + containerIdStr);

    String pidFile = ctx.getExecutionAttribute(PID_FILE_PATH).toString();
    @SuppressWarnings("unchecked")
    List<String> localDirs = ctx.getExecutionAttribute(LOCAL_DIRS);
    @SuppressWarnings("unchecked")
    List<String> logDirs = ctx.getExecutionAttribute(LOG_DIRS);

    Path keystorePath = ctx.getExecutionAttribute(NM_PRIVATE_KEYSTORE_PATH);
    Path truststorePath =
        ctx.getExecutionAttribute(NM_PRIVATE_TRUSTSTORE_PATH);
    int https = 0;
    String keystore = null;
    String truststore = null;
    // https is enabled only when both stores are present.
    if (keystorePath != null && truststorePath != null) {
      https = 1;
      keystore = keystorePath.toUri().getPath();
      truststore = truststorePath.toUri().getPath();
    }

    OCIProcessConfig processConfig = createOCIProcessConfig(
        containerWorkDir.toString(), env, args);
    OCILinuxConfig linuxConfig = createOCILinuxConfig(cpuShares,
        cgroupPath, seccomp);
    OCIRuntimeConfig ociRuntimeConfig = new OCIRuntimeConfig(null, mounts,
        processConfig, hostname, null, null, linuxConfig);

    RuncContainerExecutorConfig runcContainerExecutorConfig =
        createRuncContainerExecutorConfig(runAsUser, user, containerIdStr,
        applicationId, pidFile, nmPrivateContainerScriptPath.toString(),
        nmPrivateTokensPath.toString(), https, keystore, truststore,
        localDirs, logDirs, layers, ociRuntimeConfig);

    String commandFile = writeCommandToFile(
        runcContainerExecutorConfig, container);

    PrivilegedOperation launchOp = new PrivilegedOperation(
        PrivilegedOperation.OperationType.RUN_RUNC_CONTAINER);
    launchOp.appendArgs(commandFile);

    try {
      privilegedOperationExecutor.executePrivilegedOperation(null,
          launchOp, null, null, false, false);
    } catch (PrivilegedOperationException e) {
      LOG.info("Launch container failed: ", e);
      try {
        // Dump the generated config to help diagnose the failure.
        LOG.debug("config.json used: " +
            mapper.writeValueAsString(runcContainerExecutorConfig));
      } catch (IOException ioe) {
        LOG.info("Json Generation Exception", ioe);
      }
      throw new ContainerExecutionException("Launch container failed", e
          .getExitCode(), e.getOutput(), e.getErrorOutput());
    }
  }

  /**
   * Derives the cgroup path runC should use for this container.
   *
   * @param resourcesOptions the resources option string from the context
   * @param containerIdStr identifier used as the cgroup leaf name
   * @return the cgroup path, or {@code null} when cgroups are not in use or
   *         no resource restrictions were specified
   */
  private String getCgroupPath(String resourcesOptions,
      String containerIdStr) {
    if (cGroupsHandler == null) {
      LOG.debug("cGroupsHandler is null. cgroups are not in use. nothing to"
          + " do.");
      return null;
    }

    if (resourcesOptions.equals(
        (PrivilegedOperation.CGROUP_ARG_PREFIX + PrivilegedOperation
            .CGROUP_ARG_NO_TASKS))) {
      LOG.debug("no resource restrictions specified. not using runc's "
          + "cgroup options");
    } else {
      LOG.debug("using runc's cgroups options");

      String cGroupPath = "/" + cGroupsHandler.getRelativePathForCGroup(
          containerIdStr);

      LOG.debug("using cgroup parent: " + cGroupPath);

      return cGroupPath;
    }
    return null;
  }

  /**
   * Parses {@code YARN_CONTAINER_RUNTIME_RUNC_MOUNTS} and adds the requested
   * {@code source:dest[:mode]} mounts. Relative sources are resolved against
   * the localized resources (read-only). After matching, the comma count is
   * compared with the match count to detect entries the regex silently
   * skipped.
   *
   * @param mounts list the parsed mounts are appended to
   * @param environment the container launch environment
   * @param localizedResources localized resources used to resolve relative
   *        mount sources
   * @throws ContainerExecutionException if the mount list is malformed or a
   *         mode is neither "ro" nor "rw"
   */
  private void addUserMounts(List<OCIMount> mounts,
      Map<String, String> environment,
      Map<Path, List<String>> localizedResources)
      throws ContainerExecutionException {
    if (environment.containsKey(ENV_RUNC_CONTAINER_MOUNTS)) {
      Matcher parsedMounts = USER_MOUNT_PATTERN.matcher(
          environment.get(ENV_RUNC_CONTAINER_MOUNTS));
      if (!parsedMounts.find()) {
        throw new ContainerExecutionException(
            "Unable to parse user supplied mount list: "
            + environment.get(ENV_RUNC_CONTAINER_MOUNTS));
      }
      parsedMounts.reset();
      long mountCount = 0;
      while (parsedMounts.find()) {
        mountCount++;
        String src = parsedMounts.group(1);
        java.nio.file.Path srcPath = java.nio.file.Paths.get(src);
        if (!srcPath.isAbsolute()) {
          src = mountReadOnlyPath(src, localizedResources);
        }
        String dst = parsedMounts.group(2);
        String mode = parsedMounts.group(4);

        boolean isReadWrite;
        if (mode == null) {
          // Mode omitted: default to read-write.
          isReadWrite = true;
        } else if (mode.equals("rw")) {
          isReadWrite = true;
        } else if (mode.equals("ro")) {
          isReadWrite = false;
        } else {
          throw new ContainerExecutionException(
              "Unable to parse mode of some mounts in user supplied "
              + "mount list: "
              + environment.get(ENV_RUNC_CONTAINER_MOUNTS));
        }
        addRuncMountLocation(mounts, src, dst, false, isReadWrite);
      }
      long commaCount = environment.get(ENV_RUNC_CONTAINER_MOUNTS).chars()
          .filter(c -> c == ',').count();
      if (mountCount != commaCount + 1) {
        // this means the matcher skipped an improperly formatted mount
        throw new ContainerExecutionException(
            "Unable to parse some mounts in user supplied mount list: "
            + environment.get(ENV_RUNC_CONTAINER_MOUNTS));
      }
    }
  }

  /**
   * Adds the admin-configured default mounts, each given as
   * {@code src:dst}.
   *
   * @param mounts list the mounts are appended to
   * @param defaultMounts the configured {@code src:dst} strings (may be null
   *        or empty)
   * @param createSource whether a missing source may be created (passed
   *        through to {@link #addRuncMountLocation})
   * @param isReadWrite whether the mounts are read-write
   * @throws ContainerExecutionException if an entry is not exactly
   *         {@code src:dst}
   */
  private void addDefaultMountLocation(List<OCIMount> mounts,
      Set<String> defaultMounts, boolean createSource, boolean isReadWrite)
      throws ContainerExecutionException {
    if(defaultMounts != null && !defaultMounts.isEmpty()) {
      for (String mount : defaultMounts) {
        String[] dir = StringUtils.split(mount, ':');
        if (dir.length != 2) {
          throw new ContainerExecutionException("Invalid mount : " + mount);
        }
        String src = dir[0];
        String dst = dir[1];
        addRuncMountLocation(mounts, src, dst, createSource, isReadWrite);
      }
    }
  }

  /**
   * Appends a single bind mount ({@code rbind,rprivate}, plus {@code rw} or
   * {@code ro}). When {@code createSource} is false and the source path does
   * not exist, the mount is silently skipped.
   */
  private void addRuncMountLocation(List<OCIMount> mounts, String srcPath,
      String dstPath, boolean createSource, boolean isReadWrite) {
    if (!createSource) {
      boolean sourceExists = new File(srcPath).exists();
      if (!sourceExists) {
        return;
      }
    }
    ArrayList<String> options = new ArrayList<>();
    if (isReadWrite) {
      options.add("rw");
    } else {
      options.add("ro");
    }
    options.add("rbind");
    options.add("rprivate");
    mounts.add(new OCIMount(dstPath, "bind", srcPath, options));
  }

  /** Bind-mounts each path in {@code paths} onto the same path in the
   *  container. */
  private void addAllRuncMountLocations(List<OCIMount> mounts,
      List<String> paths, boolean createSource, boolean isReadWrite) {
    for (String dir: paths) {
      this.addRuncMountLocation(mounts, dir, dir, createSource, isReadWrite);
    }
  }

  /**
   * Resolves the container's image (falling back to the configured default
   * and writing it back into the environment), converts it via the plugins
   * into a config resource and layer resources, attaches them to the
   * container as a {@link RuncRuntimeObject}, and registers them under
   * unique {@code runc-layer<N>} / {@code runc-config<N>} keys in the
   * container's local resources.
   *
   * @param container the container being prepared
   * @return the container's local-resource map including the runC resources
   * @throws IOException if the manifest or resource plugins fail
   */
  public Map<String, LocalResource> getLocalResources(
      Container container) throws IOException {
    Map<String, LocalResource> containerLocalRsrc =
        container.getLaunchContext().getLocalResources();
    long layerCount = 0;
    Map<String, String> environment =
        container.getLaunchContext().getEnvironment();
    String imageName = environment.get(ENV_RUNC_CONTAINER_IMAGE);

    if (imageName == null || imageName.isEmpty()) {
      environment.put(ENV_RUNC_CONTAINER_IMAGE,
          defaultRuncImage);
      imageName = defaultRuncImage;
    }

    ImageManifest manifest =
        imageTagToManifestPlugin.getManifestFromImageTag(imageName);
    LocalResource config =
        manifestToResourcesPlugin.getConfigResource(manifest);
    List<LocalResource> layers =
        manifestToResourcesPlugin.getLayerResources(manifest);

    RuncRuntimeObject runcRuntimeObject =
        new RuncRuntimeObject(config, layers);
    container.setContainerRuntimeData(runcRuntimeObject);

    // Probe with putIfAbsent until an unused key index is found, so runC
    // resources never clobber keys already present in the map.
    for (LocalResource localRsrc : layers) {
      while(containerLocalRsrc.putIfAbsent("runc-layer" +
          Long.toString(layerCount), localRsrc) != null) {
        layerCount++;
      }
    }

    while(containerLocalRsrc.putIfAbsent("runc-config" +
        Long.toString(layerCount), config) != null) {
      layerCount++;
    }

    return containerLocalRsrc;
  }

  /**
   * Instantiates the configured image-tag-to-manifest plugin by reflection.
   *
   * @return the plugin instance
   * @throws ContainerExecutionException if the class cannot be loaded or
   *         instantiated
   */
  protected RuncImageTagToManifestPlugin chooseImageTagToManifestPlugin()
      throws ContainerExecutionException {
    String pluginName = conf.get(NM_RUNC_IMAGE_TAG_TO_MANIFEST_PLUGIN,
        DEFAULT_NM_RUNC_IMAGE_TAG_TO_MANIFEST_PLUGIN);
    RuncImageTagToManifestPlugin runcImageTagToManifestPlugin;
    try {
      Class<?> clazz = Class.forName(pluginName);
      runcImageTagToManifestPlugin =
          (RuncImageTagToManifestPlugin) clazz.newInstance();
    } catch (Exception e) {
      throw new ContainerExecutionException(e);
    }
    return runcImageTagToManifestPlugin;
  }

  /**
   * Instantiates the configured manifest-to-resources plugin by reflection.
   *
   * @return the plugin instance
   * @throws ContainerExecutionException if the class cannot be loaded or
   *         instantiated
   */
  protected RuncManifestToResourcesPlugin chooseManifestToResourcesPlugin()
      throws ContainerExecutionException {
    String pluginName = conf.get(NM_RUNC_MANIFEST_TO_RESOURCES_PLUGIN,
        DEFAULT_NM_RUNC_MANIFEST_TO_RESOURCES_PLUGIN);
    LOG.info("pluginName = " + pluginName);
    RuncManifestToResourcesPlugin runcManifestToResourcesPlugin;
    try {
      Class<?> clazz = Class.forName(pluginName);
      runcManifestToResourcesPlugin =
          (RuncManifestToResourcesPlugin) clazz.newInstance();
    } catch (Exception e) {
      throw new ContainerExecutionException(e);
    }
    return runcManifestToResourcesPlugin;
  }

  /**
   * Reads the image config JSON and returns its {@code config.Env} list, or
   * {@code null} when the node is absent.
   */
  @SuppressWarnings("unchecked")
  protected List<String> extractImageEnv(File config) throws IOException {
    JsonNode node = mapper.readTree(config);
    JsonNode envNode = node.path("config").path("Env");
    if (envNode.isMissingNode()) {
      return null;
    }
    return mapper.readValue(envNode.traverse(), List.class);
  }

  /**
   * Reads the image config JSON and returns its {@code config.Entrypoint}
   * list, or {@code null} when the node is absent.
   */
  @SuppressWarnings("unchecked")
  protected List<String> extractImageEntrypoint(File config)
      throws IOException {
    JsonNode node = mapper.readTree(config);
    JsonNode entrypointNode = node.path("config").path("Entrypoint");
    if (entrypointNode.isMissingNode()) {
      return null;
    }
    return mapper.readValue(entrypointNode.traverse(), List.class);
  }

  /** Bundles all launch parameters into the config object serialized for
   *  container-executor. */
  private RuncContainerExecutorConfig createRuncContainerExecutorConfig(
      String runAsUser, String username, String containerId,
      String applicationId, String pidFile,
      String containerScriptPath, String containerCredentialsPath,
      int https, String keystorePath, String truststorePath,
      List<String> localDirs, List<String> logDirs,
      List<OCILayer> layers, OCIRuntimeConfig ociRuntimeConfig) {

    return new RuncContainerExecutorConfig(runAsUser, username, containerId,
        applicationId, pidFile, containerScriptPath,
        containerCredentialsPath, https, keystorePath, truststorePath,
        localDirs, logDirs, layers, layersToKeep, ociRuntimeConfig);
  }

  /** Builds the OCI process section (cwd, env, args); remaining fields are
   *  left at their defaults. */
  private OCIProcessConfig createOCIProcessConfig(String cwd,
      List<String> env, List<String> args) {
    return new OCIProcessConfig(false, null, cwd, env, args, null, null,
        null, false, 0, null, null);
  }

  /** Builds the OCI linux section with CPU shares, the cgroup path, and the
   *  seccomp profile; other fields default. */
  private OCILinuxConfig createOCILinuxConfig(long cpuShares,
      String cgroupsPath, String seccompProf) {
    OCILinuxConfig.Resources.CPU cgroupCPU =
        new OCILinuxConfig.Resources.CPU(cpuShares, 0, 0, 0, 0,
        null, null);
    OCILinuxConfig.Resources cgroupResources =
        new OCILinuxConfig.Resources(null, null, cgroupCPU, null, null, null,
        null, null);

    return new OCILinuxConfig(null, null, null, null, cgroupsPath,
        cgroupResources, null, null, seccompProf, null, null, null, null);
  }

  /**
   * Assembles all mounts for the container: private /tmp and /var/tmp under
   * the work dir, log and app-local dirs (rw), filecache dirs (ro),
   * admin-configured defaults, then user-requested mounts.
   *
   * @throws ContainerExecutionException if the user mount list is invalid
   */
  private void setContainerMounts(ArrayList<OCIMount> mounts,
      ContainerRuntimeContext ctx, Path containerWorkDir,
      Map<String, String> environment)
      throws ContainerExecutionException {
    @SuppressWarnings("unchecked")
    List<String> filecacheDirs = ctx.getExecutionAttribute(FILECACHE_DIRS);
    @SuppressWarnings("unchecked")
    List<String> containerLogDirs = ctx.getExecutionAttribute(
        CONTAINER_LOG_DIRS);
    @SuppressWarnings("unchecked")
    List<String> userFilecacheDirs =
        ctx.getExecutionAttribute(USER_FILECACHE_DIRS);
    @SuppressWarnings("unchecked")
    List<String> applicationLocalDirs =
        ctx.getExecutionAttribute(APPLICATION_LOCAL_DIRS);
    @SuppressWarnings("unchecked")
    Map<Path, List<String>> localizedResources = ctx.getExecutionAttribute(
        LOCALIZED_RESOURCES);

    addRuncMountLocation(mounts, containerWorkDir.toString() +
        "/private_slash_tmp", "/tmp", true, true);
    addRuncMountLocation(mounts, containerWorkDir.toString() +
        "/private_var_slash_tmp", "/var/tmp", true, true);

    addAllRuncMountLocations(mounts, containerLogDirs, true, true);
    addAllRuncMountLocations(mounts, applicationLocalDirs, true, true);
    addAllRuncMountLocations(mounts, filecacheDirs, false, false);
    addAllRuncMountLocations(mounts, userFilecacheDirs, false, false);
    addDefaultMountLocation(mounts, defaultROMounts, false, false);
    addDefaultMountLocation(mounts, defaultRWMounts, false, true);
    addUserMounts(mounts, environment, localizedResources);
  }

  /**
   * Serializes the executor config as JSON into an NM-private, per-container
   * {@code runc-config.json} file.
   *
   * @param runcContainerExecutorConfig the config to serialize
   * @param container the container the file belongs to
   * @return the absolute path of the written command file
   * @throws ContainerExecutionException if the directory cannot be created
   *         or the file cannot be written
   */
  public String writeCommandToFile(
      RuncContainerExecutorConfig runcContainerExecutorConfig,
      Container container)
      throws ContainerExecutionException {
    ContainerId containerId = container.getContainerId();
    String filePrefix = containerId.toString();
    ApplicationId appId = containerId.getApplicationAttemptId()
        .getApplicationId();

    File commandFile;
    try {
      File cmdDir = null;

      if(nmContext != null && nmContext.getLocalDirsHandler() != null) {
        String cmdDirStr =
            nmContext.getLocalDirsHandler().getLocalPathForWrite(
            ResourceLocalizationService.NM_PRIVATE_DIR + Path.SEPARATOR +
            appId + Path.SEPARATOR + filePrefix + Path.SEPARATOR).toString();
        cmdDir = new File(cmdDirStr);
        // mkdirs() may race with another creator; accept either outcome as
        // long as the directory ends up existing.
        if (!cmdDir.mkdirs() && !cmdDir.exists()) {
          throw new IOException("Cannot create container private directory "
              + cmdDir);
        }
      }
      commandFile = new File(cmdDir + "/runc-config.json");
      try {
        mapper.writeValue(commandFile, runcContainerExecutorConfig);
      } catch (IOException ioe) {
        throw new ContainerExecutionException(ioe);
      }
      return commandFile.getAbsolutePath();
    } catch (IOException e) {
      LOG.warn("Unable to write runc config.json to temporary file!");
      throw new ContainerExecutionException(e);
    }
  }

  // Not supported by this runtime: always returns null.
  public String getExposedPorts(Container container) {
    return null;
  }

  // Not supported by this runtime: always returns null.
  public String[] getIpAndHost(Container container) {
    return null;
  }

  // Exec into a running container is not supported: always returns null.
  public IOStreamPair execContainer(ContainerExecContext ctx)
      throws ContainerExecutionException {
    return null;
  }

  // No runtime-specific reap work is needed.
  public void reapContainer(ContainerRuntimeContext ctx)
      throws ContainerExecutionException {
  }

  // Relaunch is a no-op for this runtime.
  public void relaunchContainer(ContainerRuntimeContext ctx)
      throws ContainerExecutionException {
  }

  /**
   * Return whether the given environment variables indicate that the
   * operation is requesting a Runc container. If the environment contains a
   * key called {@code YARN_CONTAINER_RUNTIME_TYPE} whose value is
   * {@code runc}, this method will return true. Otherwise it will return
   * false.
   *
   * @param daemonConf the NodeManager daemon configuration
   * @param env the environment variable settings for the operation
   * @return whether a Runc container is requested
   */
  public static boolean isRuncContainerRequested(Configuration daemonConf,
      Map<String, String> env) {
    String type = (env == null)
        ? null : env.get(ContainerRuntimeConstants.ENV_CONTAINER_TYPE);
    if (type == null) {
      // Fall back to the daemon-wide default runtime type.
      type = daemonConf.get(YarnConfiguration.LINUX_CONTAINER_RUNTIME_TYPE);
    }
    return type != null && type.equals(
        ContainerRuntimeConstants.CONTAINER_RUNTIME_RUNC);
  }

  /** Delegates to {@link #isRuncContainerRequested} with this runtime's
   *  configuration. */
  @Override
  public boolean isRuntimeRequested(Map<String, String> env) {
    return isRuncContainerRequested(conf, env);
  }

  /**
   * Signals the container via a {@code SIGNAL_CONTAINER} privileged
   * operation. On KILL/TERM, CSI volumes are unpublished first. Signaling
   * failures are not logged here because some are expected; the caller
   * decides how to react to the thrown exception.
   *
   * @param ctx context with the signal and container attributes
   * @throws ContainerExecutionException if volume unpublish or the
   *         privileged signal operation fails
   */
  @Override
  public void signalContainer(ContainerRuntimeContext ctx)
      throws ContainerExecutionException {
    ContainerExecutor.Signal signal = ctx.getExecutionAttribute(SIGNAL);
    Container container = ctx.getContainer();
    if (signal == ContainerExecutor.Signal.KILL
        || signal == ContainerExecutor.Signal.TERM) {
      ContainerVolumePublisher publisher = new ContainerVolumePublisher(
          container, container.getCsiVolumesRootDir(), this);
      try {
        publisher.unpublishVolumes();
      } catch (YarnException | IOException e) {
        throw new ContainerExecutionException(e);
      }
    }

    PrivilegedOperation signalOp = new PrivilegedOperation(
        PrivilegedOperation.OperationType.SIGNAL_CONTAINER);
    signalOp.appendArgs(ctx.getExecutionAttribute(RUN_AS_USER),
        ctx.getExecutionAttribute(USER),
        Integer.toString(PrivilegedOperation.RunAsUserCommand
        .SIGNAL_CONTAINER.getValue()),
        ctx.getExecutionAttribute(PID),
        Integer.toString(signal.getValue()));

    //Some failures here are acceptable. Let the calling executor decide.
    signalOp.disableFailureLogging();

    try {
      PrivilegedOperationExecutor executor = PrivilegedOperationExecutor
          .getInstance(conf);
      executor.executePrivilegedOperation(null,
          signalOp, null, null, false, false);
    } catch (PrivilegedOperationException e) {
      //Don't log the failure here. Some kinds of signaling failures are
      // acceptable. Let the calling executor decide what to do.
      throw new ContainerExecutionException("Signal container failed", e
          .getExitCode(), e.getOutput(), e.getErrorOutput());
    }
  }

  /**
   * Immutable holder pairing an image's config resource with its layer
   * resources, attached to a {@link Container} during localization and read
   * back at launch time.
   */
  @InterfaceStability.Unstable
  static class RuncRuntimeObject {
    private final List<LocalResource> layers;
    private final LocalResource config;

    RuncRuntimeObject(LocalResource config,
        List<LocalResource> layers) {
      this.config = config;
      this.layers = layers;
    }

    /** @return the image config resource */
    public LocalResource getConfig() {
      return this.config;
    }

    /** @return the image layer resources */
    public List<LocalResource> getOCILayers() {
      return this.layers;
    }
  }

  /** @return whether containers may share the host PID namespace */
  boolean getHostPidNamespaceEnabled() {
    return conf.getBoolean(
        YarnConfiguration.NM_RUNC_ALLOW_HOST_PID_NAMESPACE,
        YarnConfiguration.DEFAULT_NM_RUNC_ALLOW_HOST_PID_NAMESPACE);
  }

  /** @return whether privileged containers are enabled on this cluster */
  boolean getPrivilegedContainersEnabledOnCluster() {
    return conf.getBoolean(
        YarnConfiguration.NM_RUNC_ALLOW_PRIVILEGED_CONTAINERS,
        YarnConfiguration.DEFAULT_NM_RUNC_ALLOW_PRIVILEGED_CONTAINERS);
  }

  /** @return the configured set of allowed container networks */
  Set<String> getAllowedNetworks() {
    return allowedNetworks;
  }

  /** @return the configured set of allowed container runtimes */
  Set<String> getAllowedRuntimes() {
    return allowedRuntimes;
  }

  /** @return the ACL governing who may run privileged containers */
  AccessControlList getPrivilegedContainersAcl() {
    return privilegedContainersAcl;
  }

  /** @return the env-var name controlling the container PID namespace */
  String getEnvOciContainerPidNamespace() {
    return ENV_RUNC_CONTAINER_PID_NAMESPACE;
  }

  /** @return the env-var name requesting a privileged container */
  String getEnvOciContainerRunPrivilegedContainer() {
    return ENV_RUNC_CONTAINER_RUN_PRIVILEGED_CONTAINER;
  }
}
apache/hadoop
36,033
hadoop-common-project/hadoop-common/src/test/java/org/apache/hadoop/fs/shell/find/TestFind.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.fs.shell.find; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertFalse; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.junit.jupiter.api.Assertions.fail; import static org.mockito.Mockito.any; import static org.mockito.Mockito.anyInt; import static org.mockito.Mockito.eq; import static org.mockito.Mockito.inOrder; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.verifyNoMoreInteractions; import static org.mockito.Mockito.when; import java.io.IOException; import java.io.PrintStream; import java.util.Arrays; import java.util.Collections; import java.util.LinkedList; import java.util.NoSuchElementException; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileStatus; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.fs.shell.PathData; import org.apache.hadoop.fs.shell.find.BaseExpression; import org.apache.hadoop.fs.shell.find.Expression; import 
org.apache.hadoop.fs.shell.find.Find;
import org.apache.hadoop.fs.shell.find.FindOptions;
import org.apache.hadoop.fs.shell.find.Result;
import org.junit.jupiter.api.BeforeEach;
import org.junit.jupiter.api.Test;
import org.junit.jupiter.api.Timeout;
import org.mockito.InOrder;
import org.mockito.Mockito;
import org.mockito.invocation.InvocationOnMock;
import org.mockito.stubbing.Answer;

/**
 * Tests for the {@link Find} shell command: command-line option parsing,
 * construction of the expression tree ({@code -name}, {@code -print},
 * {@code -a}/{@code -and}, ...), and the order in which the root expression
 * is applied while traversing a mocked directory tree, including symlink
 * following ({@code -L}/{@code -H}), depth-first traversal, and
 * min/max-depth limits.
 */
@Timeout(10)
public class TestFind {

  // Mock file system (rebuilt per test) and its configuration.
  private static FileSystem mockFs;
  private static Configuration conf;

  @BeforeEach
  public void setup() throws IOException {
    mockFs = MockFileSystem.setup();
    conf = mockFs.getConf();
  }

  // check follow link option is recognized
  @Test
  public void processOptionsFollowLink() throws IOException {
    Find find = new Find();
    String args = "-L path";
    find.processOptions(getArgs(args));
    assertTrue(find.getOptions().isFollowLink());
    assertFalse(find.getOptions().isFollowArgLink());
  }

  // check follow arg link option is recognized
  @Test
  public void processOptionsFollowArgLink() throws IOException {
    Find find = new Find();
    String args = "-H path";
    find.processOptions(getArgs(args));
    assertFalse(find.getOptions().isFollowLink());
    assertTrue(find.getOptions().isFollowArgLink());
  }

  // check combining -L and -H: follow link wins over follow arg link
  @Test
  public void processOptionsFollowLinkFollowArgLink() throws IOException {
    Find find = new Find();
    String args = "-L -H path";
    find.processOptions(getArgs(args));
    assertTrue(find.getOptions().isFollowLink());
    // follow link option takes precedence over follow arg link
    assertFalse(find.getOptions().isFollowArgLink());
  }

  // check options and expressions are stripped from args leaving paths
  @Test
  public void processOptionsExpression() throws IOException {
    Find find = new Find();
    find.setConf(conf);
    String paths = "path1 path2 path3";
    String args = "-L -H " + paths + " -print -name test";
    LinkedList<String> argsList = getArgs(args);
    find.processOptions(argsList);
    LinkedList<String> pathList = getArgs(paths);
    assertEquals(pathList, argsList);
  }

  // check print is used as the default expression
  @Test
  public void processOptionsNoExpression() throws IOException {
    Find find = new Find();
    find.setConf(conf);
    String args = "path";
    String expected = "Print(;)";
    find.processOptions(getArgs(args));
    Expression expression = find.getRootExpression();
    assertEquals(expected, expression.toString());
  }

  // check unknown options are rejected
  @Test
  public void processOptionsUnknown() throws IOException {
    Find find = new Find();
    find.setConf(conf);
    String args = "path -unknown";
    try {
      find.processOptions(getArgs(args));
      fail("Unknown expression not caught");
    } catch (IOException e) {
      // expected: unknown expression must raise an IOException
    }
  }

  // check unknown options are rejected when mixed with known options
  @Test
  public void processOptionsKnownUnknown() throws IOException {
    Find find = new Find();
    find.setConf(conf);
    String args = "path -print -unknown -print";
    try {
      find.processOptions(getArgs(args));
      fail("Unknown expression not caught");
    } catch (IOException e) {
      // expected: unknown expression must raise an IOException
    }
  }

  // check no path defaults to current working directory
  @Test
  public void processOptionsNoPath() throws IOException {
    Find find = new Find();
    find.setConf(conf);
    String args = "-print";
    LinkedList<String> argsList = getArgs(args);
    find.processOptions(argsList);
    assertEquals(Collections.singletonList(Path.CUR_DIR), argsList);
  }

  // check -name is handled correctly
  @Test
  public void processOptionsName() throws IOException {
    Find find = new Find();
    find.setConf(conf);
    String args = "path -name namemask";
    String expected = "And(;Name(namemask;),Print(;))";
    find.processOptions(getArgs(args));
    Expression expression = find.getRootExpression();
    assertEquals(expected, expression.toString());
  }

  // check -iname is handled correctly
  @Test
  public void processOptionsIname() throws IOException {
    Find find = new Find();
    find.setConf(conf);
    String args = "path -iname namemask";
    String expected = "And(;Iname-Name(namemask;),Print(;))";
    find.processOptions(getArgs(args));
    Expression expression = find.getRootExpression();
    assertEquals(expected, expression.toString());
  }

  // check -print is handled correctly
  @Test
  public void processOptionsPrint() throws IOException {
    Find find = new Find();
    find.setConf(conf);
    String args = "path -print";
    String expected = "Print(;)";
    find.processOptions(getArgs(args));
    Expression expression = find.getRootExpression();
    assertEquals(expected, expression.toString());
  }

  // check -print0 is handled correctly
  @Test
  public void processOptionsPrint0() throws IOException {
    Find find = new Find();
    find.setConf(conf);
    String args = "path -print0";
    String expected = "Print0-Print(;)";
    find.processOptions(getArgs(args));
    Expression expression = find.getRootExpression();
    assertEquals(expected, expression.toString());
  }

  // check an implicit and is handled correctly
  @Test
  public void processOptionsNoop() throws IOException {
    Find find = new Find();
    find.setConf(conf);
    String args = "path -name one -name two -print";
    String expected = "And(;And(;Name(one;),Name(two;)),Print(;))";
    find.processOptions(getArgs(args));
    Expression expression = find.getRootExpression();
    assertEquals(expected, expression.toString());
  }

  // check -a is handled correctly
  @Test
  public void processOptionsA() throws IOException {
    Find find = new Find();
    find.setConf(conf);
    String args = "path -name one -a -name two -a -print";
    String expected = "And(;And(;Name(one;),Name(two;)),Print(;))";
    find.processOptions(getArgs(args));
    Expression expression = find.getRootExpression();
    assertEquals(expected, expression.toString());
  }

  // check -and is handled correctly
  @Test
  public void processOptionsAnd() throws IOException {
    Find find = new Find();
    find.setConf(conf);
    String args = "path -name one -and -name two -and -print";
    String expected = "And(;And(;Name(one;),Name(two;)),Print(;))";
    find.processOptions(getArgs(args));
    Expression expression = find.getRootExpression();
    assertEquals(expected, expression.toString());
  }

  // check expressions are called in the correct order
  @Test
  public void processArguments() throws IOException {
    LinkedList<PathData> items = createDirectories();
    Find find = new Find();
    find.setConf(conf);
    PrintStream out = mock(PrintStream.class);
    find.getOptions().setOut(out);
    PrintStream err = mock(PrintStream.class);
    find.getOptions().setErr(err);
    Expression expr = mock(Expression.class);
    when(expr.apply((PathData) any(), anyInt())).thenReturn(Result.PASS);
    FileStatusChecker fsCheck = mock(FileStatusChecker.class);
    Expression test = new TestExpression(expr, fsCheck);
    find.setRootExpression(test);
    find.processArguments(items);
    // default traversal is pre-order (parent before children)
    InOrder inOrder = inOrder(expr);
    inOrder.verify(expr).setOptions(find.getOptions());
    inOrder.verify(expr).prepare();
    inOrder.verify(expr).apply(item1, 0);
    inOrder.verify(expr).apply(item1a, 1);
    inOrder.verify(expr).apply(item1aa, 2);
    inOrder.verify(expr).apply(item1b, 1);
    inOrder.verify(expr).apply(item2, 0);
    inOrder.verify(expr).apply(item3, 0);
    inOrder.verify(expr).apply(item4, 0);
    inOrder.verify(expr).apply(item5, 0);
    inOrder.verify(expr).apply(item5a, 1);
    inOrder.verify(expr).apply(item5b, 1);
    inOrder.verify(expr).apply(item5c, 1);
    inOrder.verify(expr).apply(item5ca, 2);
    inOrder.verify(expr).apply(item5d, 1);
    inOrder.verify(expr).apply(item5e, 1);
    inOrder.verify(expr).finish();
    verifyNoMoreInteractions(expr);
    InOrder inOrderFsCheck = inOrder(fsCheck);
    inOrderFsCheck.verify(fsCheck).check(item1.stat);
    inOrderFsCheck.verify(fsCheck).check(item1a.stat);
    inOrderFsCheck.verify(fsCheck).check(item1aa.stat);
    inOrderFsCheck.verify(fsCheck).check(item1b.stat);
    inOrderFsCheck.verify(fsCheck).check(item2.stat);
    inOrderFsCheck.verify(fsCheck).check(item3.stat);
    inOrderFsCheck.verify(fsCheck).check(item4.stat);
    inOrderFsCheck.verify(fsCheck).check(item5.stat);
    inOrderFsCheck.verify(fsCheck).check(item5a.stat);
    inOrderFsCheck.verify(fsCheck).check(item5b.stat);
    inOrderFsCheck.verify(fsCheck).check(item5c.stat);
    inOrderFsCheck.verify(fsCheck).check(item5ca.stat);
    inOrderFsCheck.verify(fsCheck).check(item5d.stat);
    inOrderFsCheck.verify(fsCheck).check(item5e.stat);
    verifyNoMoreInteractions(fsCheck);
    verifyNoMoreInteractions(out);
    verifyNoMoreInteractions(err);
  }

  // check that directories are descended correctly when -depth is specified
  @Test
  public void processArgumentsDepthFirst() throws IOException {
    LinkedList<PathData> items = createDirectories();
    Find find = new Find();
    find.getOptions().setDepthFirst(true);
    find.setConf(conf);
    PrintStream out = mock(PrintStream.class);
    find.getOptions().setOut(out);
    PrintStream err = mock(PrintStream.class);
    find.getOptions().setErr(err);
    Expression expr = mock(Expression.class);
    when(expr.apply((PathData) any(), anyInt())).thenReturn(Result.PASS);
    FileStatusChecker fsCheck = mock(FileStatusChecker.class);
    Expression test = new TestExpression(expr, fsCheck);
    find.setRootExpression(test);
    find.processArguments(items);
    // depth-first: children are applied before their parent directory
    InOrder inOrder = inOrder(expr);
    inOrder.verify(expr).setOptions(find.getOptions());
    inOrder.verify(expr).prepare();
    inOrder.verify(expr).apply(item1aa, 2);
    inOrder.verify(expr).apply(item1a, 1);
    inOrder.verify(expr).apply(item1b, 1);
    inOrder.verify(expr).apply(item1, 0);
    inOrder.verify(expr).apply(item2, 0);
    inOrder.verify(expr).apply(item3, 0);
    inOrder.verify(expr).apply(item4, 0);
    inOrder.verify(expr).apply(item5a, 1);
    inOrder.verify(expr).apply(item5b, 1);
    inOrder.verify(expr).apply(item5ca, 2);
    inOrder.verify(expr).apply(item5c, 1);
    inOrder.verify(expr).apply(item5d, 1);
    inOrder.verify(expr).apply(item5e, 1);
    inOrder.verify(expr).apply(item5, 0);
    inOrder.verify(expr).finish();
    verifyNoMoreInteractions(expr);
    InOrder inOrderFsCheck = inOrder(fsCheck);
    inOrderFsCheck.verify(fsCheck).check(item1aa.stat);
    inOrderFsCheck.verify(fsCheck).check(item1a.stat);
    inOrderFsCheck.verify(fsCheck).check(item1b.stat);
    inOrderFsCheck.verify(fsCheck).check(item1.stat);
    inOrderFsCheck.verify(fsCheck).check(item2.stat);
    inOrderFsCheck.verify(fsCheck).check(item3.stat);
    inOrderFsCheck.verify(fsCheck).check(item4.stat);
    inOrderFsCheck.verify(fsCheck).check(item5a.stat);
    inOrderFsCheck.verify(fsCheck).check(item5b.stat);
    inOrderFsCheck.verify(fsCheck).check(item5ca.stat);
    inOrderFsCheck.verify(fsCheck).check(item5c.stat);
    inOrderFsCheck.verify(fsCheck).check(item5d.stat);
    inOrderFsCheck.verify(fsCheck).check(item5e.stat);
    inOrderFsCheck.verify(fsCheck).check(item5.stat);
    verifyNoMoreInteractions(fsCheck);
    verifyNoMoreInteractions(out);
    verifyNoMoreInteractions(err);
  }

  // check symlinks given as path arguments are processed correctly with the
  // follow arg option set
  @Test
  public void processArgumentsOptionFollowArg() throws IOException {
    LinkedList<PathData> items = createDirectories();
    Find find = new Find();
    find.getOptions().setFollowArgLink(true);
    find.setConf(conf);
    PrintStream out = mock(PrintStream.class);
    find.getOptions().setOut(out);
    PrintStream err = mock(PrintStream.class);
    find.getOptions().setErr(err);
    Expression expr = mock(Expression.class);
    when(expr.apply((PathData) any(), anyInt())).thenReturn(Result.PASS);
    FileStatusChecker fsCheck = mock(FileStatusChecker.class);
    Expression test = new TestExpression(expr, fsCheck);
    find.setRootExpression(test);
    find.processArguments(items);
    InOrder inOrder = inOrder(expr);
    inOrder.verify(expr).setOptions(find.getOptions());
    inOrder.verify(expr).prepare();
    inOrder.verify(expr).apply(item1, 0);
    inOrder.verify(expr).apply(item1a, 1);
    inOrder.verify(expr).apply(item1aa, 2);
    inOrder.verify(expr).apply(item1b, 1);
    inOrder.verify(expr).apply(item2, 0);
    inOrder.verify(expr).apply(item3, 0);
    inOrder.verify(expr).apply(item4, 0);
    inOrder.verify(expr).apply(item5, 0);
    inOrder.verify(expr).apply(item5a, 1);
    inOrder.verify(expr).apply(item5b, 1);
    inOrder.verify(expr).apply(item5c, 1);
    inOrder.verify(expr).apply(item5ca, 2);
    inOrder.verify(expr).apply(item5d, 1);
    inOrder.verify(expr).apply(item5e, 1);
    inOrder.verify(expr).finish();
    verifyNoMoreInteractions(expr);
    InOrder inOrderFsCheck = inOrder(fsCheck);
    inOrderFsCheck.verify(fsCheck).check(item1.stat);
    inOrderFsCheck.verify(fsCheck).check(item1a.stat);
    inOrderFsCheck.verify(fsCheck).check(item1aa.stat);
    inOrderFsCheck.verify(fsCheck).check(item1b.stat);
    inOrderFsCheck.verify(fsCheck).check(item2.stat);
    // item4 is an arg symlink to item3, so item3's status is checked twice
    inOrderFsCheck.verify(fsCheck, times(2)).check(item3.stat);
    inOrderFsCheck.verify(fsCheck).check(item5.stat);
    inOrderFsCheck.verify(fsCheck).check(item5a.stat);
    inOrderFsCheck.verify(fsCheck).check(item5b.stat);
    inOrderFsCheck.verify(fsCheck).check(item5c.stat);
    inOrderFsCheck.verify(fsCheck).check(item5ca.stat);
    inOrderFsCheck.verify(fsCheck).check(item5d.stat);
    inOrderFsCheck.verify(fsCheck).check(item5e.stat);
    verifyNoMoreInteractions(fsCheck);
    verifyNoMoreInteractions(out);
    verifyNoMoreInteractions(err);
  }

  // check symlinks given as path arguments are processed correctly with the
  // follow option
  @Test
  public void processArgumentsOptionFollow() throws IOException {
    LinkedList<PathData> items = createDirectories();
    Find find = new Find();
    find.getOptions().setFollowLink(true);
    find.setConf(conf);
    PrintStream out = mock(PrintStream.class);
    find.getOptions().setOut(out);
    PrintStream err = mock(PrintStream.class);
    find.getOptions().setErr(err);
    Expression expr = mock(Expression.class);
    when(expr.apply((PathData) any(), anyInt())).thenReturn(Result.PASS);
    FileStatusChecker fsCheck = mock(FileStatusChecker.class);
    Expression test = new TestExpression(expr, fsCheck);
    find.setRootExpression(test);
    find.processArguments(items);
    InOrder inOrder = inOrder(expr);
    inOrder.verify(expr).setOptions(find.getOptions());
    inOrder.verify(expr).prepare();
    inOrder.verify(expr).apply(item1, 0);
    inOrder.verify(expr).apply(item1a, 1);
    inOrder.verify(expr).apply(item1aa, 2);
    inOrder.verify(expr).apply(item1b, 1);
    inOrder.verify(expr).apply(item2, 0);
    inOrder.verify(expr).apply(item3, 0);
    inOrder.verify(expr).apply(item4, 0);
    inOrder.verify(expr).apply(item5, 0);
    inOrder.verify(expr).apply(item5a, 1);
    inOrder.verify(expr).apply(item5b, 1); // triggers infinite loop message
    inOrder.verify(expr).apply(item5c, 1);
    inOrder.verify(expr).apply(item5ca, 2);
    inOrder.verify(expr).apply(item5d, 1);
    inOrder.verify(expr).apply(item5ca, 2); // following item5d symlink
    inOrder.verify(expr).apply(item5e, 1);
    inOrder.verify(expr).finish();
    verifyNoMoreInteractions(expr);
    InOrder inOrderFsCheck = inOrder(fsCheck);
    inOrderFsCheck.verify(fsCheck).check(item1.stat);
    inOrderFsCheck.verify(fsCheck).check(item1a.stat);
    inOrderFsCheck.verify(fsCheck).check(item1aa.stat);
    inOrderFsCheck.verify(fsCheck).check(item1b.stat);
    inOrderFsCheck.verify(fsCheck).check(item2.stat);
    inOrderFsCheck.verify(fsCheck, times(2)).check(item3.stat);
    inOrderFsCheck.verify(fsCheck).check(item5.stat);
    inOrderFsCheck.verify(fsCheck).check(item1b.stat);
    inOrderFsCheck.verify(fsCheck).check(item5.stat);
    inOrderFsCheck.verify(fsCheck).check(item5c.stat);
    inOrderFsCheck.verify(fsCheck).check(item5ca.stat);
    inOrderFsCheck.verify(fsCheck).check(item5c.stat);
    inOrderFsCheck.verify(fsCheck, times(2)).check(item5ca.stat);
    verifyNoMoreInteractions(fsCheck);
    verifyNoMoreInteractions(out);
    // item5b -> item5 would recurse forever; Find must report and skip it
    verify(err).println(
        "Infinite loop ignored: " + item5b.toString() + " -> "
            + item5.toString());
    verifyNoMoreInteractions(err);
  }

  // check minimum depth is handled
  @Test
  public void processArgumentsMinDepth() throws IOException {
    LinkedList<PathData> items = createDirectories();
    Find find = new Find();
    find.getOptions().setMinDepth(1);
    find.setConf(conf);
    PrintStream out = mock(PrintStream.class);
    find.getOptions().setOut(out);
    PrintStream err = mock(PrintStream.class);
    find.getOptions().setErr(err);
    Expression expr = mock(Expression.class);
    when(expr.apply((PathData) any(), anyInt())).thenReturn(Result.PASS);
    FileStatusChecker fsCheck = mock(FileStatusChecker.class);
    Expression test = new TestExpression(expr, fsCheck);
    find.setRootExpression(test);
    find.processArguments(items);
    // depth-0 items (the arguments themselves) must be skipped
    InOrder inOrder = inOrder(expr);
    inOrder.verify(expr).setOptions(find.getOptions());
    inOrder.verify(expr).prepare();
    inOrder.verify(expr).apply(item1a, 1);
    inOrder.verify(expr).apply(item1aa, 2);
    inOrder.verify(expr).apply(item1b, 1);
    inOrder.verify(expr).apply(item5a, 1);
    inOrder.verify(expr).apply(item5b, 1);
    inOrder.verify(expr).apply(item5c, 1);
    inOrder.verify(expr).apply(item5ca, 2);
    inOrder.verify(expr).apply(item5d, 1);
    inOrder.verify(expr).apply(item5e, 1);
    inOrder.verify(expr).finish();
    verifyNoMoreInteractions(expr);
    InOrder inOrderFsCheck = inOrder(fsCheck);
    inOrderFsCheck.verify(fsCheck).check(item1a.stat);
    inOrderFsCheck.verify(fsCheck).check(item1aa.stat);
    inOrderFsCheck.verify(fsCheck).check(item1b.stat);
    inOrderFsCheck.verify(fsCheck).check(item5a.stat);
    inOrderFsCheck.verify(fsCheck).check(item5b.stat);
    inOrderFsCheck.verify(fsCheck).check(item5c.stat);
    inOrderFsCheck.verify(fsCheck).check(item5ca.stat);
    inOrderFsCheck.verify(fsCheck).check(item5d.stat);
    inOrderFsCheck.verify(fsCheck).check(item5e.stat);
    verifyNoMoreInteractions(fsCheck);
    verifyNoMoreInteractions(out);
    verifyNoMoreInteractions(err);
  }

  // check maximum depth is handled
  @Test
  public void processArgumentsMaxDepth() throws IOException {
    LinkedList<PathData> items = createDirectories();
    Find find = new Find();
    find.getOptions().setMaxDepth(1);
    find.setConf(conf);
    PrintStream out = mock(PrintStream.class);
    find.getOptions().setOut(out);
    PrintStream err = mock(PrintStream.class);
    find.getOptions().setErr(err);
    Expression expr = mock(Expression.class);
    when(expr.apply((PathData) any(), anyInt())).thenReturn(Result.PASS);
    FileStatusChecker fsCheck = mock(FileStatusChecker.class);
    Expression test = new TestExpression(expr, fsCheck);
    find.setRootExpression(test);
    find.processArguments(items);
    // depth-2 items (item1aa, item5ca) must not be visited
    InOrder inOrder = inOrder(expr);
    inOrder.verify(expr).setOptions(find.getOptions());
    inOrder.verify(expr).prepare();
    inOrder.verify(expr).apply(item1, 0);
    inOrder.verify(expr).apply(item1a, 1);
    inOrder.verify(expr).apply(item1b, 1);
    inOrder.verify(expr).apply(item2, 0);
    inOrder.verify(expr).apply(item3, 0);
    inOrder.verify(expr).apply(item4, 0);
    inOrder.verify(expr).apply(item5, 0);
    inOrder.verify(expr).apply(item5a, 1);
    inOrder.verify(expr).apply(item5b, 1);
    inOrder.verify(expr).apply(item5c, 1);
    inOrder.verify(expr).apply(item5d, 1);
    inOrder.verify(expr).apply(item5e, 1);
    inOrder.verify(expr).finish();
    verifyNoMoreInteractions(expr);
    InOrder inOrderFsCheck = inOrder(fsCheck);
    inOrderFsCheck.verify(fsCheck).check(item1.stat);
    inOrderFsCheck.verify(fsCheck).check(item1a.stat);
    inOrderFsCheck.verify(fsCheck).check(item1b.stat);
    inOrderFsCheck.verify(fsCheck).check(item2.stat);
    inOrderFsCheck.verify(fsCheck).check(item3.stat);
    inOrderFsCheck.verify(fsCheck).check(item4.stat);
    inOrderFsCheck.verify(fsCheck).check(item5.stat);
    inOrderFsCheck.verify(fsCheck).check(item5a.stat);
    inOrderFsCheck.verify(fsCheck).check(item5b.stat);
    inOrderFsCheck.verify(fsCheck).check(item5c.stat);
    inOrderFsCheck.verify(fsCheck).check(item5d.stat);
    inOrderFsCheck.verify(fsCheck).check(item5e.stat);
    verifyNoMoreInteractions(fsCheck);
    verifyNoMoreInteractions(out);
    verifyNoMoreInteractions(err);
  }

  // check min depth is handled when -depth is specified
  @Test
  public void processArgumentsDepthFirstMinDepth() throws IOException {
    LinkedList<PathData> items = createDirectories();
    Find find = new Find();
    find.getOptions().setDepthFirst(true);
    find.getOptions().setMinDepth(1);
    find.setConf(conf);
    PrintStream out = mock(PrintStream.class);
    find.getOptions().setOut(out);
    PrintStream err = mock(PrintStream.class);
    find.getOptions().setErr(err);
    Expression expr = mock(Expression.class);
    when(expr.apply((PathData) any(), anyInt())).thenReturn(Result.PASS);
    FileStatusChecker fsCheck = mock(FileStatusChecker.class);
    Expression test = new TestExpression(expr, fsCheck);
    find.setRootExpression(test);
    find.processArguments(items);
    InOrder inOrder = inOrder(expr);
    inOrder.verify(expr).setOptions(find.getOptions());
    inOrder.verify(expr).prepare();
    inOrder.verify(expr).apply(item1aa, 2);
    inOrder.verify(expr).apply(item1a, 1);
    inOrder.verify(expr).apply(item1b, 1);
    inOrder.verify(expr).apply(item5a, 1);
    inOrder.verify(expr).apply(item5b, 1);
    inOrder.verify(expr).apply(item5ca, 2);
    inOrder.verify(expr).apply(item5c, 1);
    inOrder.verify(expr).apply(item5d, 1);
    inOrder.verify(expr).apply(item5e, 1);
    inOrder.verify(expr).finish();
    verifyNoMoreInteractions(expr);
    InOrder inOrderFsCheck = inOrder(fsCheck);
    inOrderFsCheck.verify(fsCheck).check(item1aa.stat);
    inOrderFsCheck.verify(fsCheck).check(item1a.stat);
    inOrderFsCheck.verify(fsCheck).check(item1b.stat);
    inOrderFsCheck.verify(fsCheck).check(item5a.stat);
    inOrderFsCheck.verify(fsCheck).check(item5b.stat);
    inOrderFsCheck.verify(fsCheck).check(item5ca.stat);
    inOrderFsCheck.verify(fsCheck).check(item5c.stat);
    inOrderFsCheck.verify(fsCheck).check(item5d.stat);
    inOrderFsCheck.verify(fsCheck).check(item5e.stat);
    verifyNoMoreInteractions(fsCheck);
    verifyNoMoreInteractions(out);
    verifyNoMoreInteractions(err);
  }

  // check max depth is handled when -depth is specified
  @Test
  public void processArgumentsDepthFirstMaxDepth() throws IOException {
    LinkedList<PathData> items = createDirectories();
    Find find = new Find();
    find.getOptions().setDepthFirst(true);
    find.getOptions().setMaxDepth(1);
    find.setConf(conf);
    PrintStream out = mock(PrintStream.class);
    find.getOptions().setOut(out);
    PrintStream err = mock(PrintStream.class);
    find.getOptions().setErr(err);
    Expression expr = mock(Expression.class);
    when(expr.apply((PathData) any(), anyInt())).thenReturn(Result.PASS);
    FileStatusChecker fsCheck = mock(FileStatusChecker.class);
    Expression test = new TestExpression(expr, fsCheck);
    find.setRootExpression(test);
    find.processArguments(items);
    InOrder inOrder = inOrder(expr);
    inOrder.verify(expr).setOptions(find.getOptions());
    inOrder.verify(expr).prepare();
    inOrder.verify(expr).apply(item1a, 1);
    inOrder.verify(expr).apply(item1b, 1);
    inOrder.verify(expr).apply(item1, 0);
    inOrder.verify(expr).apply(item2, 0);
    inOrder.verify(expr).apply(item3, 0);
    inOrder.verify(expr).apply(item4, 0);
    inOrder.verify(expr).apply(item5a, 1);
    inOrder.verify(expr).apply(item5b, 1);
    inOrder.verify(expr).apply(item5c, 1);
    inOrder.verify(expr).apply(item5d, 1);
    inOrder.verify(expr).apply(item5e, 1);
    inOrder.verify(expr).apply(item5, 0);
    inOrder.verify(expr).finish();
    verifyNoMoreInteractions(expr);
    InOrder inOrderFsCheck = inOrder(fsCheck);
    inOrderFsCheck.verify(fsCheck).check(item1a.stat);
    inOrderFsCheck.verify(fsCheck).check(item1b.stat);
    inOrderFsCheck.verify(fsCheck).check(item1.stat);
    inOrderFsCheck.verify(fsCheck).check(item2.stat);
    inOrderFsCheck.verify(fsCheck).check(item3.stat);
    inOrderFsCheck.verify(fsCheck).check(item4.stat);
    inOrderFsCheck.verify(fsCheck).check(item5a.stat);
    inOrderFsCheck.verify(fsCheck).check(item5b.stat);
    inOrderFsCheck.verify(fsCheck).check(item5c.stat);
    inOrderFsCheck.verify(fsCheck).check(item5d.stat);
    inOrderFsCheck.verify(fsCheck).check(item5e.stat);
    inOrderFsCheck.verify(fsCheck).check(item5.stat);
    verifyNoMoreInteractions(fsCheck);
    verifyNoMoreInteractions(out);
    verifyNoMoreInteractions(err);
  }

  // check a STOP result prevents the traversal from descending into item1a
  @Test
  public void processArgumentsNoDescend() throws IOException {
    LinkedList<PathData> items = createDirectories();
    Find find = new Find();
    find.setConf(conf);
    PrintStream out = mock(PrintStream.class);
    find.getOptions().setOut(out);
    PrintStream err = mock(PrintStream.class);
    find.getOptions().setErr(err);
    Expression expr = mock(Expression.class);
    when(expr.apply((PathData) any(), anyInt())).thenReturn(Result.PASS);
    when(expr.apply(eq(item1a), anyInt())).thenReturn(Result.STOP);
    FileStatusChecker fsCheck = mock(FileStatusChecker.class);
    Expression test = new TestExpression(expr, fsCheck);
    find.setRootExpression(test);
    find.processArguments(items);
    // item1aa (inside item1a) must never be applied
    InOrder inOrder = inOrder(expr);
    inOrder.verify(expr).setOptions(find.getOptions());
    inOrder.verify(expr).prepare();
    inOrder.verify(expr).apply(item1, 0);
    inOrder.verify(expr).apply(item1a, 1);
    inOrder.verify(expr).apply(item1b, 1);
    inOrder.verify(expr).apply(item2, 0);
    inOrder.verify(expr).apply(item3, 0);
    inOrder.verify(expr).apply(item4, 0);
    inOrder.verify(expr).apply(item5, 0);
    inOrder.verify(expr).apply(item5a, 1);
    inOrder.verify(expr).apply(item5b, 1);
    inOrder.verify(expr).apply(item5c, 1);
    inOrder.verify(expr).apply(item5ca, 2);
    inOrder.verify(expr).apply(item5d, 1);
    inOrder.verify(expr).apply(item5e, 1);
    inOrder.verify(expr).finish();
    verifyNoMoreInteractions(expr);
    InOrder inOrderFsCheck = inOrder(fsCheck);
    inOrderFsCheck.verify(fsCheck).check(item1.stat);
    inOrderFsCheck.verify(fsCheck).check(item1a.stat);
    inOrderFsCheck.verify(fsCheck).check(item1b.stat);
    inOrderFsCheck.verify(fsCheck).check(item2.stat);
    inOrderFsCheck.verify(fsCheck).check(item3.stat);
    inOrderFsCheck.verify(fsCheck).check(item4.stat);
    inOrderFsCheck.verify(fsCheck).check(item5.stat);
    inOrderFsCheck.verify(fsCheck).check(item5a.stat);
    inOrderFsCheck.verify(fsCheck).check(item5b.stat);
    inOrderFsCheck.verify(fsCheck).check(item5c.stat);
    inOrderFsCheck.verify(fsCheck).check(item5ca.stat);
    inOrderFsCheck.verify(fsCheck).check(item5d.stat);
    inOrderFsCheck.verify(fsCheck).check(item5e.stat);
    verifyNoMoreInteractions(fsCheck);
    verifyNoMoreInteractions(out);
    verifyNoMoreInteractions(err);
  }

  // Callback used by TestExpression so tests can verify (via a mock) which
  // FileStatus was resolved for each visited item, and in what order.
  private interface FileStatusChecker {
    public void check(FileStatus fileStatus);
  }

  // Expression wrapper that records the FileStatus of every visited item
  // through a FileStatusChecker, then delegates to the wrapped expression.
  private class TestExpression extends BaseExpression implements Expression {
    private Expression expr;
    private FileStatusChecker checker;

    public TestExpression(Expression expr, FileStatusChecker checker) {
      this.expr = expr;
      this.checker = checker;
    }

    @Override
    public Result apply(PathData item, int depth) throws IOException {
      FileStatus fileStatus = getFileStatus(item, depth);
      checker.check(fileStatus);
      return expr.apply(item, depth);
    }

    @Override
    public void setOptions(FindOptions options) throws IOException {
      super.setOptions(options);
      expr.setOptions(options);
    }

    @Override
    public void prepare() throws IOException {
      expr.prepare();
    }

    @Override
    public void finish() throws IOException {
      expr.finish();
    }
  }

  // creates a directory structure for traversal
  // item1 (directory)
  // \- item1a (directory)
  //    \- item1aa (file)
  // \- item1b (file)
  // item2 (directory)
  // item3 (file)
  // item4 (link) -> item3
  // item5 (directory)
  // \- item5a (link) -> item1b
  // \- item5b (link) -> item5 (infinite loop)
  // \- item5c (directory)
  //    \- item5ca (file)
  // \- item5d (link) -> item5c
  // \- item5e (link) -> item5c/item5ca
  private PathData item1 = null;
  private PathData item1a = null;
  private PathData item1aa = null;
  private PathData item1b = null;
  private PathData item2 = null;
  private PathData item3 = null;
  private PathData item4 = null;
  private PathData item5 = null;
  private PathData item5a = null;
  private PathData item5b = null;
  private PathData item5c = null;
  private PathData item5ca = null;
  private PathData item5d = null;
  private PathData item5e = null;

  // Builds the mocked tree above on mockFs and returns the top-level
  // arguments (item1..item5) that the tests feed to processArguments().
  private LinkedList<PathData> createDirectories() throws IOException {
    item1 = createPathData("item1");
    item1a = createPathData("item1/item1a");
    item1aa = createPathData("item1/item1a/item1aa");
    item1b = createPathData("item1/item1b");
    item2 = createPathData("item2");
    item3 = createPathData("item3");
    item4 = createPathData("item4");
    item5 = createPathData("item5");
    item5a = createPathData("item5/item5a");
    item5b = createPathData("item5/item5b");
    item5c = createPathData("item5/item5c");
    item5ca = createPathData("item5/item5c/item5ca");
    item5d = createPathData("item5/item5d");
    item5e = createPathData("item5/item5e");
    LinkedList<PathData> args = new LinkedList<PathData>();
    when(item1.stat.isDirectory()).thenReturn(true);
    when(item1a.stat.isDirectory()).thenReturn(true);
    when(item1aa.stat.isDirectory()).thenReturn(false);
    when(item1b.stat.isDirectory()).thenReturn(false);
    when(item2.stat.isDirectory()).thenReturn(true);
    when(item3.stat.isDirectory()).thenReturn(false);
    when(item4.stat.isDirectory()).thenReturn(false);
    when(item5.stat.isDirectory()).thenReturn(true);
    when(item5a.stat.isDirectory()).thenReturn(false);
    when(item5b.stat.isDirectory()).thenReturn(false);
    when(item5c.stat.isDirectory()).thenReturn(true);
    when(item5ca.stat.isDirectory()).thenReturn(false);
    when(item5d.stat.isDirectory()).thenReturn(false);
    when(item5e.stat.isDirectory()).thenReturn(false);
    when(mockFs.listStatus(eq(item1.path))).thenReturn(
        new FileStatus[] { item1a.stat, item1b.stat });
    when(mockFs.listStatus(eq(item1a.path))).thenReturn(
        new FileStatus[] { item1aa.stat });
    when(mockFs.listStatus(eq(item2.path))).thenReturn(new FileStatus[0]);
    when(mockFs.listStatus(eq(item5.path))).thenReturn(
        new FileStatus[] { item5a.stat, item5b.stat, item5c.stat,
            item5d.stat, item5e.stat });
    when(mockFs.listStatus(eq(item5c.path))).thenReturn(
        new FileStatus[] { item5ca.stat });
    // back listStatusIterator by the listStatus stubs above so both
    // listing APIs return the same entries
    when(mockFs.listStatusIterator(Mockito.any(Path.class)))
        .thenAnswer(new Answer<RemoteIterator<FileStatus>>() {
          @Override
          public RemoteIterator<FileStatus> answer(InvocationOnMock invocation)
              throws Throwable {
            final Path p = (Path) invocation.getArguments()[0];
            final FileStatus[] stats = mockFs.listStatus(p);
            return new RemoteIterator<FileStatus>() {
              private int i = 0;

              @Override
              public boolean hasNext() throws IOException {
                return i < stats.length;
              }

              @Override
              public FileStatus next() throws IOException {
                if (!hasNext()) {
                  throw new NoSuchElementException("No more entry in " + p);
                }
                return stats[i++];
              }
            };
          }
        });
    when(item1.stat.isSymlink()).thenReturn(false);
    when(item1a.stat.isSymlink()).thenReturn(false);
    when(item1aa.stat.isSymlink()).thenReturn(false);
    when(item1b.stat.isSymlink()).thenReturn(false);
    when(item2.stat.isSymlink()).thenReturn(false);
    when(item3.stat.isSymlink()).thenReturn(false);
    when(item4.stat.isSymlink()).thenReturn(true);
    when(item5.stat.isSymlink()).thenReturn(false);
    when(item5a.stat.isSymlink()).thenReturn(true);
    when(item5b.stat.isSymlink()).thenReturn(true);
    when(item5d.stat.isSymlink()).thenReturn(true);
    when(item5e.stat.isSymlink()).thenReturn(true);
    when(item4.stat.getSymlink()).thenReturn(item3.path);
    when(item5a.stat.getSymlink()).thenReturn(item1b.path);
    when(item5b.stat.getSymlink()).thenReturn(item5.path);
    when(item5d.stat.getSymlink()).thenReturn(item5c.path);
    when(item5e.stat.getSymlink()).thenReturn(item5ca.path);
    args.add(item1);
    args.add(item2);
    args.add(item3);
    args.add(item4);
    args.add(item5);
    return args;
  }

  // Creates a PathData backed by a mocked FileStatus registered on mockFs.
  private PathData createPathData(String name) throws IOException {
    Path path = new Path(name);
    FileStatus fstat = mock(FileStatus.class);
    when(fstat.getPath()).thenReturn(path);
    when(fstat.toString()).thenReturn("fileStatus:" + name);
    when(mockFs.getFileStatus(eq(path))).thenReturn(fstat);
    PathData item = new PathData(path.toString(), conf);
    return item;
  }

  // Splits a space-separated command string into a mutable argument list.
  private LinkedList<String> getArgs(String cmd) {
    return new LinkedList<String>(Arrays.asList(cmd.split(" ")));
  }
}
apache/jackrabbit
35,787
jackrabbit-jcr-commons/src/main/java/org/apache/jackrabbit/commons/query/sql2/Parser.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.commons.query.sql2; import java.math.BigDecimal; import java.util.ArrayList; import java.util.HashMap; import javax.jcr.PropertyType; import javax.jcr.RepositoryException; import javax.jcr.Value; import javax.jcr.ValueFactory; import javax.jcr.query.InvalidQueryException; import javax.jcr.query.qom.BindVariableValue; import javax.jcr.query.qom.Column; import javax.jcr.query.qom.Constraint; import javax.jcr.query.qom.DynamicOperand; import javax.jcr.query.qom.JoinCondition; import javax.jcr.query.qom.Literal; import javax.jcr.query.qom.Ordering; import javax.jcr.query.qom.PropertyExistence; import javax.jcr.query.qom.PropertyValue; import javax.jcr.query.qom.QueryObjectModel; import javax.jcr.query.qom.QueryObjectModelFactory; import javax.jcr.query.qom.Selector; import javax.jcr.query.qom.Source; import javax.jcr.query.qom.StaticOperand; import org.apache.jackrabbit.commons.query.qom.JoinType; import org.apache.jackrabbit.commons.query.qom.Operator; /** * The SQL2 parser can convert a JCR-SQL2 query to a QueryObjectModel. 
*/ public class Parser { // Character types, used during the tokenizer phase private static final int CHAR_END = -1, CHAR_VALUE = 2, CHAR_QUOTED = 3; private static final int CHAR_NAME = 4, CHAR_SPECIAL_1 = 5, CHAR_SPECIAL_2 = 6; private static final int CHAR_STRING = 7, CHAR_DECIMAL = 8; // Token types private static final int KEYWORD = 1, IDENTIFIER = 2, PARAMETER = 3, END = 4, VALUE = 5; private static final int MINUS = 12, PLUS = 13, OPEN = 14, CLOSE = 15; // The query as an array of characters and character types private String statement; private char[] statementChars; private int[] characterTypes; // The current state of the parser private int parseIndex; private int currentTokenType; private String currentToken; private boolean currentTokenQuoted; private Value currentValue; private ArrayList<String> expected; // The bind variables private HashMap<String, BindVariableValue> bindVariables; // The list of selectors of this query private ArrayList<Selector> selectors; // SQL injection protection: if disabled, literals are not allowed private boolean allowTextLiterals = true, allowNumberLiterals = true; private QueryObjectModelFactory factory; private ValueFactory valueFactory; /** * Create a new parser. A parser can be re-used, but it is not thread safe. 
* * @param factory the query object model factory * @param valueFactory the value factory */ public Parser(QueryObjectModelFactory factory, ValueFactory valueFactory) { this.factory = factory; this.valueFactory = valueFactory; } /** * Parse a JCR-SQL2 query and return the query object model * * @param query the query string * @return the query object model * @throws RepositoryException if parsing failed */ public QueryObjectModel createQueryObjectModel(String query) throws RepositoryException { initialize(query); selectors = new ArrayList<Selector>(); expected = new ArrayList<String>(); bindVariables = new HashMap<String, BindVariableValue>(); read(); read("SELECT"); int columnParseIndex = parseIndex; ArrayList<ColumnOrWildcard> list = parseColumns(); read("FROM"); Source source = parseSource(); Column[] columnArray = resolveColumns(columnParseIndex, list); Constraint constraint = null; if (readIf("WHERE")) { constraint = parseConstraint(); } Ordering[] orderings = null; if (readIf("ORDER")) { read("BY"); orderings = parseOrder(); } if (currentToken.length() > 0) { throw getSyntaxError("<end>"); } return factory.createQuery(source, constraint, orderings, columnArray); } private Selector parseSelector() throws RepositoryException { String nodeTypeName = readName(); if (readIf("AS")) { String selectorName = readName(); return factory.selector(nodeTypeName, selectorName); } else { return factory.selector(nodeTypeName, nodeTypeName); } } private String readName() throws RepositoryException { if (readIf("[")) { if (currentTokenType == VALUE) { Value value = readString(); read("]"); return value.getString(); } else { int level = 1; StringBuilder buff = new StringBuilder(); while (true) { if (isToken("]")) { if (--level <= 0) { read(); break; } } else if (isToken("[")) { level++; } buff.append(readAny()); } return buff.toString(); } } else { return readAny(); } } private Source parseSource() throws RepositoryException { Selector selector = parseSelector(); 
selectors.add(selector); Source source = selector; while (true) { JoinType type; if (readIf("RIGHT")) { read("OUTER"); type = JoinType.RIGHT; } else if (readIf("LEFT")) { read("OUTER"); type = JoinType.LEFT; } else if (readIf("INNER")) { type = JoinType.INNER; } else { break; } read("JOIN"); selector = parseSelector(); selectors.add(selector); read("ON"); JoinCondition on = parseJoinCondition(); source = type.join(factory, source, selector, on); } return source; } private JoinCondition parseJoinCondition() throws RepositoryException { boolean identifier = currentTokenType == IDENTIFIER; String name = readName(); JoinCondition c; if (identifier && readIf("(")) { if ("ISSAMENODE".equalsIgnoreCase(name)) { String selector1 = readName(); read(","); String selector2 = readName(); if (readIf(",")) { c = factory.sameNodeJoinCondition(selector1, selector2, readPath()); } else { c = factory.sameNodeJoinCondition(selector1, selector2, "."); } } else if ("ISCHILDNODE".equalsIgnoreCase(name)) { String childSelector = readName(); read(","); c = factory.childNodeJoinCondition(childSelector, readName()); } else if ("ISDESCENDANTNODE".equalsIgnoreCase(name)) { String descendantSelector = readName(); read(","); c = factory.descendantNodeJoinCondition(descendantSelector, readName()); } else { throw getSyntaxError("ISSAMENODE, ISCHILDNODE, or ISDESCENDANTNODE"); } read(")"); return c; } else { String selector1 = name; read("."); String property1 = readName(); read("="); String selector2 = readName(); read("."); return factory.equiJoinCondition(selector1, property1, selector2, readName()); } } private Constraint parseConstraint() throws RepositoryException { Constraint a = parseAnd(); while (readIf("OR")) { a = factory.or(a, parseAnd()); } return a; } private Constraint parseAnd() throws RepositoryException { Constraint a = parseCondition(); while (readIf("AND")) { a = factory.and(a, parseCondition()); } return a; } private Constraint parseCondition() throws RepositoryException { 
Constraint a; if (readIf("NOT")) { a = factory.not(parseConstraint()); } else if (readIf("(")) { a = parseConstraint(); read(")"); } else if (currentTokenType == IDENTIFIER) { String identifier = readName(); if (readIf("(")) { a = parseConditionFuntionIf(identifier); if (a == null) { DynamicOperand op = parseExpressionFunction(identifier); a = parseCondition(op); } } else if (readIf(".")) { a = parseCondition(factory.propertyValue(identifier, readName())); } else { a = parseCondition(factory.propertyValue(getOnlySelectorName(identifier), identifier)); } } else if ("[".equals(currentToken)) { String name = readName(); if (readIf(".")) { a = parseCondition(factory.propertyValue(name, readName())); } else { a = parseCondition(factory.propertyValue(getOnlySelectorName(name), name)); } } else { throw getSyntaxError(); } return a; } private Constraint parseCondition(DynamicOperand left) throws RepositoryException { Constraint c; if (readIf("=")) { c = Operator.EQ.comparison(factory, left, parseStaticOperand()); } else if (readIf("<>")) { c = Operator.NE.comparison(factory, left, parseStaticOperand()); } else if (readIf("<")) { c = Operator.LT.comparison(factory, left, parseStaticOperand()); } else if (readIf(">")) { c = Operator.GT.comparison(factory, left, parseStaticOperand()); } else if (readIf("<=")) { c = Operator.LE.comparison(factory, left, parseStaticOperand()); } else if (readIf(">=")) { c = Operator.GE.comparison(factory, left, parseStaticOperand()); } else if (readIf("LIKE")) { c = Operator.LIKE.comparison(factory, left, parseStaticOperand()); } else if (readIf("IS")) { boolean not = readIf("NOT"); read("NULL"); if (!(left instanceof PropertyValue)) { throw getSyntaxError("propertyName (NOT NULL is only supported for properties)"); } PropertyValue p = (PropertyValue) left; c = getPropertyExistence(p); if (!not) { c = factory.not(c); } } else if (readIf("NOT")) { if (readIf("IS")) { read("NULL"); if (!(left instanceof PropertyValue)) { throw new 
RepositoryException( "Only property values can be tested for NOT IS NULL; got: " + left.getClass().getName()); } PropertyValue pv = (PropertyValue) left; c = getPropertyExistence(pv); } else { read("LIKE"); c = factory.not(Operator.LIKE.comparison( factory, left, parseStaticOperand())); } } else { throw getSyntaxError(); } return c; } private PropertyExistence getPropertyExistence(PropertyValue p) throws InvalidQueryException, RepositoryException { return factory.propertyExistence(p.getSelectorName(), p.getPropertyName()); } private Constraint parseConditionFuntionIf(String functionName) throws RepositoryException { Constraint c; if ("CONTAINS".equalsIgnoreCase(functionName)) { String name = readName(); if (readIf(".")) { if (readIf("*")) { read(","); c = factory.fullTextSearch( name, null, parseStaticOperand()); } else { String selector = name; name = readName(); read(","); c = factory.fullTextSearch( selector, name, parseStaticOperand()); } } else { read(","); c = factory.fullTextSearch( getOnlySelectorName(name), name, parseStaticOperand()); } } else if ("ISSAMENODE".equalsIgnoreCase(functionName)) { String name = readName(); if (readIf(",")) { c = factory.sameNode(name, readPath()); } else { c = factory.sameNode(getOnlySelectorName(name), name); } } else if ("ISCHILDNODE".equalsIgnoreCase(functionName)) { String name = readName(); if (readIf(",")) { c = factory.childNode(name, readPath()); } else { c = factory.childNode(getOnlySelectorName(name), name); } } else if ("ISDESCENDANTNODE".equalsIgnoreCase(functionName)) { String name = readName(); if (readIf(",")) { c = factory.descendantNode(name, readPath()); } else { c = factory.descendantNode(getOnlySelectorName(name), name); } } else { return null; } read(")"); return c; } private String readPath() throws RepositoryException { return readName(); } private DynamicOperand parseDynamicOperand() throws RepositoryException { boolean identifier = currentTokenType == IDENTIFIER; String name = readName(); if 
(identifier && readIf("(")) { return parseExpressionFunction(name); } else { return parsePropertyValue(name); } } private DynamicOperand parseExpressionFunction(String functionName) throws RepositoryException { DynamicOperand op; if ("LENGTH".equalsIgnoreCase(functionName)) { op = factory.length(parsePropertyValue(readName())); } else if ("NAME".equalsIgnoreCase(functionName)) { if (isToken(")")) { op = factory.nodeName(getOnlySelectorName("NAME()")); } else { op = factory.nodeName(readName()); } } else if ("LOCALNAME".equalsIgnoreCase(functionName)) { if (isToken(")")) { op = factory.nodeLocalName(getOnlySelectorName("LOCALNAME()")); } else { op = factory.nodeLocalName(readName()); } } else if ("SCORE".equalsIgnoreCase(functionName)) { if (isToken(")")) { op = factory.fullTextSearchScore(getOnlySelectorName("SCORE()")); } else { op = factory.fullTextSearchScore(readName()); } } else if ("LOWER".equalsIgnoreCase(functionName)) { op = factory.lowerCase(parseDynamicOperand()); } else if ("UPPER".equalsIgnoreCase(functionName)) { op = factory.upperCase(parseDynamicOperand()); } else { throw getSyntaxError("LENGTH, NAME, LOCALNAME, SCORE, LOWER, UPPER, or CAST"); } read(")"); return op; } private PropertyValue parsePropertyValue(String name) throws RepositoryException { if (readIf(".")) { return factory.propertyValue(name, readName()); } else { return factory.propertyValue(getOnlySelectorName(name), name); } } private StaticOperand parseStaticOperand() throws RepositoryException { if (currentTokenType == PLUS) { read(); } else if (currentTokenType == MINUS) { read(); if (currentTokenType != VALUE) { throw getSyntaxError("number"); } int valueType = currentValue.getType(); switch (valueType) { case PropertyType.LONG: currentValue = valueFactory.createValue(-currentValue.getLong()); break; case PropertyType.DOUBLE: currentValue = valueFactory.createValue(-currentValue.getDouble()); break; case PropertyType.BOOLEAN: currentValue = 
valueFactory.createValue(!currentValue.getBoolean()); break; case PropertyType.DECIMAL: currentValue = valueFactory.createValue(currentValue.getDecimal().negate()); break; default: throw getSyntaxError("Illegal operation: -" + currentValue); } } if (currentTokenType == VALUE) { Literal literal = getUncastLiteral(currentValue); read(); return literal; } else if (currentTokenType == PARAMETER) { read(); String name = readName(); if (readIf(":")) { name = name + ":" + readName(); } BindVariableValue var = bindVariables.get(name); if (var == null) { var = factory.bindVariable(name); bindVariables.put(name, var); } return var; } else if (readIf("TRUE")) { Literal literal = getUncastLiteral(valueFactory.createValue(true)); return literal; } else if (readIf("FALSE")) { Literal literal = getUncastLiteral(valueFactory.createValue(false)); return literal; } else if (readIf("CAST")) { read("("); StaticOperand op = parseStaticOperand(); if (!(op instanceof Literal)) { throw getSyntaxError("literal"); } Literal literal = (Literal) op; Value value = literal.getLiteralValue(); read("AS"); value = parseCastAs(value); read(")"); // CastLiteral literal = factory.literal(value); return literal; } else { throw getSyntaxError("static operand"); } } /** * Create a literal from a parsed value. 
* * @param value the original value * @return the literal */ private Literal getUncastLiteral(Value value) throws RepositoryException { return factory.literal(value); } private Value parseCastAs(Value value) throws RepositoryException { if (readIf("STRING")) { return valueFactory.createValue(value.getString()); } else if(readIf("BINARY")) { return valueFactory.createValue(value.getBinary()); } else if(readIf("DATE")) { return valueFactory.createValue(value.getDate()); } else if(readIf("LONG")) { return valueFactory.createValue(value.getLong()); } else if(readIf("DOUBLE")) { return valueFactory.createValue(value.getDouble()); } else if(readIf("DECIMAL")) { return valueFactory.createValue(value.getDecimal()); } else if(readIf("BOOLEAN")) { return valueFactory.createValue(value.getBoolean()); } else if(readIf("NAME")) { return valueFactory.createValue(value.getString(), PropertyType.NAME); } else if(readIf("PATH")) { return valueFactory.createValue(value.getString(), PropertyType.PATH); } else if(readIf("REFERENCE")) { return valueFactory.createValue(value.getString(), PropertyType.REFERENCE); } else if(readIf("WEAKREFERENCE")) { return valueFactory.createValue(value.getString(), PropertyType.WEAKREFERENCE); } else if(readIf("URI")) { return valueFactory.createValue(value.getString(), PropertyType.URI); } else { throw getSyntaxError("data type (STRING|BINARY|...)"); } } private Ordering[] parseOrder() throws RepositoryException { ArrayList<Ordering> orderList = new ArrayList<Ordering>(); do { Ordering ordering; DynamicOperand op = parseDynamicOperand(); if (readIf("DESC")) { ordering = factory.descending(op); } else { readIf("ASC"); ordering = factory.ascending(op); } orderList.add(ordering); } while (readIf(",")); Ordering[] orderings = new Ordering[orderList.size()]; orderList.toArray(orderings); return orderings; } private ArrayList<ColumnOrWildcard> parseColumns() throws RepositoryException { ArrayList<ColumnOrWildcard> list = new ArrayList<ColumnOrWildcard>(); if 
(readIf("*")) { list.add(new ColumnOrWildcard()); } else { do { ColumnOrWildcard column = new ColumnOrWildcard(); column.propertyName = readName(); if (readIf(".")) { column.selectorName = column.propertyName; if (readIf("*")) { column.propertyName = null; } else { column.propertyName = readName(); if (readIf("AS")) { column.columnName = readName(); } else { column.columnName = column.selectorName + "." + column.propertyName; } } } else { if (readIf("AS")) { column.columnName = readName(); } } list.add(column); } while (readIf(",")); } return list; } private Column[] resolveColumns(int columnParseIndex, ArrayList<ColumnOrWildcard> list) throws RepositoryException { int oldParseIndex = parseIndex; // set the parse index to the column list, to get a more meaningful error message // if something is wrong this.parseIndex = columnParseIndex; try { ArrayList<Column> columns = new ArrayList<Column>(); for (ColumnOrWildcard c : list) { if (c.propertyName == null) { for (Selector selector : selectors) { if (c.selectorName == null || c.selectorName .equals(selector.getSelectorName())) { Column column = factory.column(selector .getSelectorName(), null, null); columns.add(column); } } } else { Column column; if (c.selectorName != null) { column = factory.column(c.selectorName, c.propertyName, c.columnName); } else if (c.columnName != null) { column = factory.column(getOnlySelectorName(c.propertyName), c.propertyName, c.columnName); } else { column = factory.column(getOnlySelectorName(c.propertyName), c.propertyName, c.propertyName); } columns.add(column); } } Column[] array = new Column[columns.size()]; columns.toArray(array); return array; } finally { this.parseIndex = oldParseIndex; } } private boolean readIf(String token) throws RepositoryException { if (isToken(token)) { read(); return true; } return false; } private boolean isToken(String token) { boolean result = token.equalsIgnoreCase(currentToken) && !currentTokenQuoted; if (result) { return true; } addExpected(token); 
return false; } private void read(String expected) throws RepositoryException { if (!expected.equalsIgnoreCase(currentToken) || currentTokenQuoted) { throw getSyntaxError(expected); } read(); } private String readAny() throws RepositoryException { if (currentTokenType == END) { throw getSyntaxError("a token"); } String s; if (currentTokenType == VALUE) { s = currentValue.getString(); } else { s = currentToken; } read(); return s; } private Value readString() throws RepositoryException { if (currentTokenType != VALUE) { throw getSyntaxError("string value"); } Value value = currentValue; read(); return value; } private void addExpected(String token) { if (expected != null) { expected.add(token); } } private void initialize(String query) throws InvalidQueryException { if (query == null) { query = ""; } statement = query; int len = query.length() + 1; char[] command = new char[len]; int[] types = new int[len]; len--; query.getChars(0, len, command, 0); command[len] = ' '; int startLoop = 0; for (int i = 0; i < len; i++) { char c = command[i]; int type = 0; switch (c) { case '/': case '-': case '(': case ')': case '{': case '}': case '*': case ',': case ';': case '+': case '%': case '?': case '$': case '[': case ']': type = CHAR_SPECIAL_1; break; case '!': case '<': case '>': case '|': case '=': case ':': type = CHAR_SPECIAL_2; break; case '.': type = CHAR_DECIMAL; break; case '\'': type = CHAR_STRING; types[i] = CHAR_STRING; startLoop = i; while (command[++i] != '\'') { checkRunOver(i, len, startLoop); } break; case '\"': type = CHAR_QUOTED; types[i] = CHAR_QUOTED; startLoop = i; while (command[++i] != '\"') { checkRunOver(i, len, startLoop); } break; case '_': type = CHAR_NAME; break; default: if (c >= 'a' && c <= 'z') { type = CHAR_NAME; } else if (c >= 'A' && c <= 'Z') { type = CHAR_NAME; } else if (c >= '0' && c <= '9') { type = CHAR_VALUE; } else { if (Character.isJavaIdentifierPart(c)) { type = CHAR_NAME; } } } types[i] = (byte) type; } statementChars = command; 
types[len] = CHAR_END; characterTypes = types; parseIndex = 0; } private void checkRunOver(int i, int len, int startLoop) throws InvalidQueryException { if (i >= len) { parseIndex = startLoop; throw getSyntaxError(); } } private void read() throws RepositoryException { currentTokenQuoted = false; if (expected != null) { expected.clear(); } int[] types = characterTypes; int i = parseIndex; int type = types[i]; while (type == 0) { type = types[++i]; } int start = i; char[] chars = statementChars; char c = chars[i++]; currentToken = ""; switch (type) { case CHAR_NAME: while (true) { type = types[i]; if (type != CHAR_NAME && type != CHAR_VALUE) { break; } i++; } currentToken = statement.substring(start, i); if (currentToken.length() == 0) { throw getSyntaxError(); } currentTokenType = IDENTIFIER; parseIndex = i; return; case CHAR_SPECIAL_2: if (types[i] == CHAR_SPECIAL_2) { i++; } // fall through case CHAR_SPECIAL_1: currentToken = statement.substring(start, i); switch (c) { case '$': currentTokenType = PARAMETER; break; case '+': currentTokenType = PLUS; break; case '-': currentTokenType = MINUS; break; case '(': currentTokenType = OPEN; break; case ')': currentTokenType = CLOSE; break; default: currentTokenType = KEYWORD; } parseIndex = i; return; case CHAR_VALUE: long number = c - '0'; while (true) { c = chars[i]; if (c < '0' || c > '9') { if (c == '.') { readDecimal(start, i); break; } if (c == 'E' || c == 'e') { readDecimal(start, i); break; } checkLiterals(false); currentValue = valueFactory.createValue(number); currentTokenType = VALUE; currentToken = "0"; parseIndex = i; break; } number = number * 10 + (c - '0'); if (number > Integer.MAX_VALUE) { readDecimal(start, i); break; } i++; } return; case CHAR_DECIMAL: if (types[i] != CHAR_VALUE) { currentTokenType = KEYWORD; currentToken = "."; parseIndex = i; return; } readDecimal(i - 1, i); return; case CHAR_STRING: readString(i, '\''); return; case CHAR_QUOTED: readString(i, '\"'); return; case CHAR_END: 
currentToken = ""; currentTokenType = END; parseIndex = i; return; default: throw getSyntaxError(); } } private void readString(int i, char end) throws RepositoryException { char[] chars = statementChars; String result = null; while (true) { for (int begin = i;; i++) { if (chars[i] == end) { if (result == null) { result = statement.substring(begin, i); } else { result += statement.substring(begin - 1, i); } break; } } if (chars[++i] != end) { break; } i++; } currentToken = "'"; checkLiterals(false); currentValue = valueFactory.createValue(result); parseIndex = i; currentTokenType = VALUE; } private void checkLiterals(boolean text) throws InvalidQueryException { if (text && !allowTextLiterals || (!text && !allowNumberLiterals)) { throw getSyntaxError("bind variable (literals of this type not allowed)"); } } private void readDecimal(int start, int i) throws RepositoryException { char[] chars = statementChars; int[] types = characterTypes; while (true) { int t = types[i]; if (t != CHAR_DECIMAL && t != CHAR_VALUE) { break; } i++; } if (chars[i] == 'E' || chars[i] == 'e') { i++; if (chars[i] == '+' || chars[i] == '-') { i++; } if (types[i] != CHAR_VALUE) { throw getSyntaxError(); } do { i++; // go until the first non-number } while (types[i] == CHAR_VALUE); } parseIndex = i; String sub = statement.substring(start, i); BigDecimal bd; try { bd = new BigDecimal(sub); } catch (NumberFormatException e) { throw new InvalidQueryException("Data conversion error converting " + sub + " to BigDecimal: " + e); } checkLiterals(false); currentValue = valueFactory.createValue(bd); currentTokenType = VALUE; } private InvalidQueryException getSyntaxError() { if (expected == null || expected.size() == 0) { return getSyntaxError(null); } else { StringBuilder buff = new StringBuilder(); for (String exp : expected) { if (buff.length() > 0) { buff.append(", "); } buff.append(exp); } return getSyntaxError(buff.toString()); } } private InvalidQueryException getSyntaxError(String expected) { 
int index = Math.min(parseIndex, statement.length() - 1); String query = statement.substring(0, index) + "(*)" + statement.substring(index).trim(); if (expected != null) { query += "; expected: " + expected; } return new InvalidQueryException("Query:\n" + query); } /** * Represents a column or a wildcard in a SQL expression. * This class is temporarily used during parsing. */ static class ColumnOrWildcard { String selectorName; String propertyName; String columnName; } /** * Get the selector name if only one selector exists in the query. * If more than one selector exists, an exception is thrown. * * @param name the property name * @return the selector name */ private String getOnlySelectorName(String propertyName) throws RepositoryException { if (selectors.size() > 1) { throw getSyntaxError("Need to specify the selector name for \"" + propertyName + "\" because the query contains more than one selector."); } return selectors.get(0).getSelectorName(); } }
googleapis/google-cloud-java
35,998
java-dlp/proto-google-cloud-dlp-v2/src/main/java/com/google/privacy/dlp/v2/ExcludeByHotword.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/privacy/dlp/v2/dlp.proto // Protobuf Java Version: 3.25.8 package com.google.privacy.dlp.v2; /** * * * <pre> * The rule to exclude findings based on a hotword. For record inspection of * tables, column names are considered hotwords. An example of this is to * exclude a finding if it belongs to a BigQuery column that matches a specific * pattern. * </pre> * * Protobuf type {@code google.privacy.dlp.v2.ExcludeByHotword} */ public final class ExcludeByHotword extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.privacy.dlp.v2.ExcludeByHotword) ExcludeByHotwordOrBuilder { private static final long serialVersionUID = 0L; // Use ExcludeByHotword.newBuilder() to construct. 
private ExcludeByHotword(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ExcludeByHotword() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ExcludeByHotword(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_ExcludeByHotword_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_ExcludeByHotword_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.ExcludeByHotword.class, com.google.privacy.dlp.v2.ExcludeByHotword.Builder.class); } private int bitField0_; public static final int HOTWORD_REGEX_FIELD_NUMBER = 1; private com.google.privacy.dlp.v2.CustomInfoType.Regex hotwordRegex_; /** * * * <pre> * Regular expression pattern defining what qualifies as a hotword. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.Regex hotword_regex = 1;</code> * * @return Whether the hotwordRegex field is set. */ @java.lang.Override public boolean hasHotwordRegex() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Regular expression pattern defining what qualifies as a hotword. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.Regex hotword_regex = 1;</code> * * @return The hotwordRegex. */ @java.lang.Override public com.google.privacy.dlp.v2.CustomInfoType.Regex getHotwordRegex() { return hotwordRegex_ == null ? com.google.privacy.dlp.v2.CustomInfoType.Regex.getDefaultInstance() : hotwordRegex_; } /** * * * <pre> * Regular expression pattern defining what qualifies as a hotword. 
* </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.Regex hotword_regex = 1;</code> */ @java.lang.Override public com.google.privacy.dlp.v2.CustomInfoType.RegexOrBuilder getHotwordRegexOrBuilder() { return hotwordRegex_ == null ? com.google.privacy.dlp.v2.CustomInfoType.Regex.getDefaultInstance() : hotwordRegex_; } public static final int PROXIMITY_FIELD_NUMBER = 2; private com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity_; /** * * * <pre> * Range of characters within which the entire hotword must reside. * The total length of the window cannot exceed 1000 characters. * The windowBefore property in proximity should be set to 1 if the hotword * needs to be included in a column header. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity = 2;</code> * * @return Whether the proximity field is set. */ @java.lang.Override public boolean hasProximity() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Range of characters within which the entire hotword must reside. * The total length of the window cannot exceed 1000 characters. * The windowBefore property in proximity should be set to 1 if the hotword * needs to be included in a column header. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity = 2;</code> * * @return The proximity. */ @java.lang.Override public com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity getProximity() { return proximity_ == null ? com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity.getDefaultInstance() : proximity_; } /** * * * <pre> * Range of characters within which the entire hotword must reside. * The total length of the window cannot exceed 1000 characters. * The windowBefore property in proximity should be set to 1 if the hotword * needs to be included in a column header. 
* </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity = 2;</code> */ @java.lang.Override public com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.ProximityOrBuilder getProximityOrBuilder() { return proximity_ == null ? com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity.getDefaultInstance() : proximity_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getHotwordRegex()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getProximity()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getHotwordRegex()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getProximity()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.privacy.dlp.v2.ExcludeByHotword)) { return super.equals(obj); } com.google.privacy.dlp.v2.ExcludeByHotword other = (com.google.privacy.dlp.v2.ExcludeByHotword) obj; if (hasHotwordRegex() != other.hasHotwordRegex()) return false; if (hasHotwordRegex()) { if (!getHotwordRegex().equals(other.getHotwordRegex())) return false; } if (hasProximity() != other.hasProximity()) return false; if (hasProximity()) { if (!getProximity().equals(other.getProximity())) 
return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasHotwordRegex()) { hash = (37 * hash) + HOTWORD_REGEX_FIELD_NUMBER; hash = (53 * hash) + getHotwordRegex().hashCode(); } if (hasProximity()) { hash = (37 * hash) + PROXIMITY_FIELD_NUMBER; hash = (53 * hash) + getProximity().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.privacy.dlp.v2.ExcludeByHotword parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.ExcludeByHotword parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.ExcludeByHotword parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.ExcludeByHotword parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.privacy.dlp.v2.ExcludeByHotword parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.privacy.dlp.v2.ExcludeByHotword parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.privacy.dlp.v2.ExcludeByHotword parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.ExcludeByHotword parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.ExcludeByHotword parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.ExcludeByHotword parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.privacy.dlp.v2.ExcludeByHotword parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.privacy.dlp.v2.ExcludeByHotword parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.privacy.dlp.v2.ExcludeByHotword prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The rule to exclude findings based on a hotword. For record inspection of * tables, column names are considered hotwords. An example of this is to * exclude a finding if it belongs to a BigQuery column that matches a specific * pattern. * </pre> * * Protobuf type {@code google.privacy.dlp.v2.ExcludeByHotword} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.privacy.dlp.v2.ExcludeByHotword) com.google.privacy.dlp.v2.ExcludeByHotwordOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_ExcludeByHotword_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_ExcludeByHotword_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.privacy.dlp.v2.ExcludeByHotword.class, com.google.privacy.dlp.v2.ExcludeByHotword.Builder.class); } // Construct using com.google.privacy.dlp.v2.ExcludeByHotword.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getHotwordRegexFieldBuilder(); getProximityFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; hotwordRegex_ = null; if (hotwordRegexBuilder_ != null) { hotwordRegexBuilder_.dispose(); 
hotwordRegexBuilder_ = null; } proximity_ = null; if (proximityBuilder_ != null) { proximityBuilder_.dispose(); proximityBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.privacy.dlp.v2.DlpProto .internal_static_google_privacy_dlp_v2_ExcludeByHotword_descriptor; } @java.lang.Override public com.google.privacy.dlp.v2.ExcludeByHotword getDefaultInstanceForType() { return com.google.privacy.dlp.v2.ExcludeByHotword.getDefaultInstance(); } @java.lang.Override public com.google.privacy.dlp.v2.ExcludeByHotword build() { com.google.privacy.dlp.v2.ExcludeByHotword result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.privacy.dlp.v2.ExcludeByHotword buildPartial() { com.google.privacy.dlp.v2.ExcludeByHotword result = new com.google.privacy.dlp.v2.ExcludeByHotword(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.privacy.dlp.v2.ExcludeByHotword result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.hotwordRegex_ = hotwordRegexBuilder_ == null ? hotwordRegex_ : hotwordRegexBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.proximity_ = proximityBuilder_ == null ? 
proximity_ : proximityBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.privacy.dlp.v2.ExcludeByHotword) { return mergeFrom((com.google.privacy.dlp.v2.ExcludeByHotword) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.privacy.dlp.v2.ExcludeByHotword other) { if (other == com.google.privacy.dlp.v2.ExcludeByHotword.getDefaultInstance()) return this; if (other.hasHotwordRegex()) { mergeHotwordRegex(other.getHotwordRegex()); } if (other.hasProximity()) { mergeProximity(other.getProximity()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { 
boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getHotwordRegexFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getProximityFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.privacy.dlp.v2.CustomInfoType.Regex hotwordRegex_; private com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2.CustomInfoType.Regex, com.google.privacy.dlp.v2.CustomInfoType.Regex.Builder, com.google.privacy.dlp.v2.CustomInfoType.RegexOrBuilder> hotwordRegexBuilder_; /** * * * <pre> * Regular expression pattern defining what qualifies as a hotword. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.Regex hotword_regex = 1;</code> * * @return Whether the hotwordRegex field is set. */ public boolean hasHotwordRegex() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Regular expression pattern defining what qualifies as a hotword. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.Regex hotword_regex = 1;</code> * * @return The hotwordRegex. */ public com.google.privacy.dlp.v2.CustomInfoType.Regex getHotwordRegex() { if (hotwordRegexBuilder_ == null) { return hotwordRegex_ == null ? com.google.privacy.dlp.v2.CustomInfoType.Regex.getDefaultInstance() : hotwordRegex_; } else { return hotwordRegexBuilder_.getMessage(); } } /** * * * <pre> * Regular expression pattern defining what qualifies as a hotword. 
* </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.Regex hotword_regex = 1;</code> */ public Builder setHotwordRegex(com.google.privacy.dlp.v2.CustomInfoType.Regex value) { if (hotwordRegexBuilder_ == null) { if (value == null) { throw new NullPointerException(); } hotwordRegex_ = value; } else { hotwordRegexBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Regular expression pattern defining what qualifies as a hotword. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.Regex hotword_regex = 1;</code> */ public Builder setHotwordRegex( com.google.privacy.dlp.v2.CustomInfoType.Regex.Builder builderForValue) { if (hotwordRegexBuilder_ == null) { hotwordRegex_ = builderForValue.build(); } else { hotwordRegexBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Regular expression pattern defining what qualifies as a hotword. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.Regex hotword_regex = 1;</code> */ public Builder mergeHotwordRegex(com.google.privacy.dlp.v2.CustomInfoType.Regex value) { if (hotwordRegexBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && hotwordRegex_ != null && hotwordRegex_ != com.google.privacy.dlp.v2.CustomInfoType.Regex.getDefaultInstance()) { getHotwordRegexBuilder().mergeFrom(value); } else { hotwordRegex_ = value; } } else { hotwordRegexBuilder_.mergeFrom(value); } if (hotwordRegex_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Regular expression pattern defining what qualifies as a hotword. 
* </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.Regex hotword_regex = 1;</code> */ public Builder clearHotwordRegex() { bitField0_ = (bitField0_ & ~0x00000001); hotwordRegex_ = null; if (hotwordRegexBuilder_ != null) { hotwordRegexBuilder_.dispose(); hotwordRegexBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Regular expression pattern defining what qualifies as a hotword. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.Regex hotword_regex = 1;</code> */ public com.google.privacy.dlp.v2.CustomInfoType.Regex.Builder getHotwordRegexBuilder() { bitField0_ |= 0x00000001; onChanged(); return getHotwordRegexFieldBuilder().getBuilder(); } /** * * * <pre> * Regular expression pattern defining what qualifies as a hotword. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.Regex hotword_regex = 1;</code> */ public com.google.privacy.dlp.v2.CustomInfoType.RegexOrBuilder getHotwordRegexOrBuilder() { if (hotwordRegexBuilder_ != null) { return hotwordRegexBuilder_.getMessageOrBuilder(); } else { return hotwordRegex_ == null ? com.google.privacy.dlp.v2.CustomInfoType.Regex.getDefaultInstance() : hotwordRegex_; } } /** * * * <pre> * Regular expression pattern defining what qualifies as a hotword. 
* </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.Regex hotword_regex = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2.CustomInfoType.Regex, com.google.privacy.dlp.v2.CustomInfoType.Regex.Builder, com.google.privacy.dlp.v2.CustomInfoType.RegexOrBuilder> getHotwordRegexFieldBuilder() { if (hotwordRegexBuilder_ == null) { hotwordRegexBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2.CustomInfoType.Regex, com.google.privacy.dlp.v2.CustomInfoType.Regex.Builder, com.google.privacy.dlp.v2.CustomInfoType.RegexOrBuilder>( getHotwordRegex(), getParentForChildren(), isClean()); hotwordRegex_ = null; } return hotwordRegexBuilder_; } private com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity_; private com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity, com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity.Builder, com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.ProximityOrBuilder> proximityBuilder_; /** * * * <pre> * Range of characters within which the entire hotword must reside. * The total length of the window cannot exceed 1000 characters. * The windowBefore property in proximity should be set to 1 if the hotword * needs to be included in a column header. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity = 2;</code> * * @return Whether the proximity field is set. */ public boolean hasProximity() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Range of characters within which the entire hotword must reside. * The total length of the window cannot exceed 1000 characters. * The windowBefore property in proximity should be set to 1 if the hotword * needs to be included in a column header. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity = 2;</code> * * @return The proximity. 
*/ public com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity getProximity() { if (proximityBuilder_ == null) { return proximity_ == null ? com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity.getDefaultInstance() : proximity_; } else { return proximityBuilder_.getMessage(); } } /** * * * <pre> * Range of characters within which the entire hotword must reside. * The total length of the window cannot exceed 1000 characters. * The windowBefore property in proximity should be set to 1 if the hotword * needs to be included in a column header. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity = 2;</code> */ public Builder setProximity( com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity value) { if (proximityBuilder_ == null) { if (value == null) { throw new NullPointerException(); } proximity_ = value; } else { proximityBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Range of characters within which the entire hotword must reside. * The total length of the window cannot exceed 1000 characters. * The windowBefore property in proximity should be set to 1 if the hotword * needs to be included in a column header. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity = 2;</code> */ public Builder setProximity( com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity.Builder builderForValue) { if (proximityBuilder_ == null) { proximity_ = builderForValue.build(); } else { proximityBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Range of characters within which the entire hotword must reside. * The total length of the window cannot exceed 1000 characters. * The windowBefore property in proximity should be set to 1 if the hotword * needs to be included in a column header. 
* </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity = 2;</code> */ public Builder mergeProximity( com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity value) { if (proximityBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && proximity_ != null && proximity_ != com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity .getDefaultInstance()) { getProximityBuilder().mergeFrom(value); } else { proximity_ = value; } } else { proximityBuilder_.mergeFrom(value); } if (proximity_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Range of characters within which the entire hotword must reside. * The total length of the window cannot exceed 1000 characters. * The windowBefore property in proximity should be set to 1 if the hotword * needs to be included in a column header. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity = 2;</code> */ public Builder clearProximity() { bitField0_ = (bitField0_ & ~0x00000002); proximity_ = null; if (proximityBuilder_ != null) { proximityBuilder_.dispose(); proximityBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Range of characters within which the entire hotword must reside. * The total length of the window cannot exceed 1000 characters. * The windowBefore property in proximity should be set to 1 if the hotword * needs to be included in a column header. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity = 2;</code> */ public com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity.Builder getProximityBuilder() { bitField0_ |= 0x00000002; onChanged(); return getProximityFieldBuilder().getBuilder(); } /** * * * <pre> * Range of characters within which the entire hotword must reside. * The total length of the window cannot exceed 1000 characters. 
* The windowBefore property in proximity should be set to 1 if the hotword * needs to be included in a column header. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity = 2;</code> */ public com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.ProximityOrBuilder getProximityOrBuilder() { if (proximityBuilder_ != null) { return proximityBuilder_.getMessageOrBuilder(); } else { return proximity_ == null ? com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity.getDefaultInstance() : proximity_; } } /** * * * <pre> * Range of characters within which the entire hotword must reside. * The total length of the window cannot exceed 1000 characters. * The windowBefore property in proximity should be set to 1 if the hotword * needs to be included in a column header. * </pre> * * <code>.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity proximity = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity, com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity.Builder, com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.ProximityOrBuilder> getProximityFieldBuilder() { if (proximityBuilder_ == null) { proximityBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity, com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.Proximity.Builder, com.google.privacy.dlp.v2.CustomInfoType.DetectionRule.ProximityOrBuilder>( getProximity(), getParentForChildren(), isClean()); proximity_ = null; } return proximityBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.privacy.dlp.v2.ExcludeByHotword) } // @@protoc_insertion_point(class_scope:google.privacy.dlp.v2.ExcludeByHotword) private static final com.google.privacy.dlp.v2.ExcludeByHotword DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.privacy.dlp.v2.ExcludeByHotword(); } public static com.google.privacy.dlp.v2.ExcludeByHotword getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ExcludeByHotword> PARSER = new com.google.protobuf.AbstractParser<ExcludeByHotword>() { @java.lang.Override public ExcludeByHotword parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ExcludeByHotword> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ExcludeByHotword> getParserForType() { return PARSER; } @java.lang.Override public com.google.privacy.dlp.v2.ExcludeByHotword getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hive
35,678
standalone-metastore/metastore-server/src/main/java/org/apache/hadoop/hive/metastore/tools/MetaToolObjectStore.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * <p> * http://www.apache.org/licenses/LICENSE-2.0 * <p> * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hive.metastore.tools; import javax.jdo.Query; import java.lang.reflect.Modifier; import java.net.URI; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import org.apache.commons.lang3.tuple.Pair; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hive.common.StatsSetupConst; import org.apache.hadoop.hive.metastore.Batchable; import org.apache.hadoop.hive.metastore.Deadline; import org.apache.hadoop.hive.metastore.ObjectStore; import org.apache.hadoop.hive.metastore.QueryWrapper; import org.apache.hadoop.hive.metastore.TableType; import org.apache.hadoop.hive.metastore.api.MetaException; import org.apache.hadoop.hive.metastore.model.MDatabase; import org.apache.hadoop.hive.metastore.model.MSerDeInfo; import org.apache.hadoop.hive.metastore.model.MStorageDescriptor; import org.apache.hadoop.hive.metastore.model.MTable; import org.apache.hadoop.hive.metastore.metasummary.MetadataTableSummary; import 
org.apache.hadoop.hive.metastore.utils.StringUtils; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.apache.commons.lang3.StringUtils.repeat; import static org.apache.hadoop.hive.metastore.Batchable.runBatched; /** * This class should be used in metatool only */ public class MetaToolObjectStore extends ObjectStore { private static final Logger LOG = LoggerFactory.getLogger(MetaToolObjectStore.class); public static class TableFormat { private TableFormat() { // private constructor } public static final String PARQUET = "parquet"; public static final String ORC = "orc"; public static final String AVRO = "avro"; public static final String JSON = "json"; public static final String HBASE = "hbase"; public static final String JDBC = "jdbc"; public static final String KUDU = "kudu"; public static final String ICEBERG = "iceberg"; public static final String TEXT = "text"; public static final String SEQUENCE = "sequence"; public static final String OPENCSV = "opencsv"; public static final String LAZY_SIMPLE = "lazysimple"; public static final String PASS_THROUGH = "passthrough"; private static final Set<String> AVAILABLE_FORMATS = new HashSet<>(); static { Arrays.stream(TableFormat.class.getDeclaredFields()) .filter(field -> Modifier.isStatic(field.getModifiers()) && Modifier.isPublic(field.getModifiers())) .map(field -> { try { return field.get(null); } catch (IllegalAccessException e) { throw new AssertionError("This should not happen"); } }) .filter(String.class::isInstance) .forEach(res -> AVAILABLE_FORMATS.add((String) res)); } public static Set<String> getNonNativeFormats() { return new HashSet<>(Arrays.asList(HBASE, JDBC, KUDU, ICEBERG)); } /** * Helper method for getMetadataSummary. Extracting the format of the file from the long string. * @param fileFormat - fileFormat. A long String which indicates the type of the file. * @return String A short String which indicates the type of the file. 
*/ public static String extractFileFormat(String fileFormat) { if (fileFormat == null) { return "NULL"; } final String lowerCaseFileFormat = fileFormat.toLowerCase(); Optional<String> result = AVAILABLE_FORMATS.stream().filter(lowerCaseFileFormat::contains).findFirst(); if (result.isPresent()) { String file = result.get(); if (OPENCSV.equals(file)) { return "openCSV"; } else if (LAZY_SIMPLE.equals(file)) { return TEXT; } return file; } return fileFormat; } public static boolean isIcebergFormat(String tableFormat) { return ICEBERG.equalsIgnoreCase(tableFormat); } } /** The following API * * - executeJDOQLSelect * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. * */ public Collection<?> executeJDOQLSelect(String queryStr) throws Exception { boolean committed = false; Collection<?> result = null; try { openTransaction(); try (Query query = pm.newQuery(queryStr)) { result = Collections.unmodifiableCollection(new ArrayList<>(((Collection<?>) query.execute()))); } committed = commitTransaction(); } finally { if (!committed) { result = null; rollbackTransaction(); } } return result; } /** The following API * * - executeJDOQLUpdate * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. * */ public long executeJDOQLUpdate(String queryStr) throws Exception { boolean committed = false; long numUpdated = 0L; try { openTransaction(); try (Query query = pm.newQuery(queryStr)) { numUpdated = (Long) query.execute(); } committed = commitTransaction(); if (committed) { return numUpdated; } } finally { rollbackAndCleanup(committed, null); } return -1L; } /** The following API * * - listFSRoots * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. 
* */ public Set<String> listFSRoots() { boolean committed = false; Query query = null; Set<String> fsRoots = new HashSet<>(); try { openTransaction(); query = pm.newQuery(MDatabase.class); List<MDatabase> mDBs = (List<MDatabase>) query.execute(); pm.retrieveAll(mDBs); for (MDatabase mDB : mDBs) { fsRoots.add(mDB.getLocationUri()); } committed = commitTransaction(); if (committed) { return fsRoots; } else { return null; } } finally { rollbackAndCleanup(committed, query); } } private boolean shouldUpdateURI(URI onDiskUri, URI inputUri) { String onDiskHost = onDiskUri.getHost(); String inputHost = inputUri.getHost(); int onDiskPort = onDiskUri.getPort(); int inputPort = inputUri.getPort(); String onDiskScheme = onDiskUri.getScheme(); String inputScheme = inputUri.getScheme(); //compare ports if (inputPort != -1) { if (inputPort != onDiskPort) { return false; } } //compare schemes if (inputScheme != null) { if (onDiskScheme == null) { return false; } if (!inputScheme.equalsIgnoreCase(onDiskScheme)) { return false; } } //compare hosts if (onDiskHost != null) { return inputHost.equalsIgnoreCase(onDiskHost); } else { return false; } } public class UpdateMDatabaseURIRetVal { private List<String> badRecords; private Map<String, String> updateLocations; UpdateMDatabaseURIRetVal(List<String> badRecords, Map<String, String> updateLocations) { this.badRecords = badRecords; this.updateLocations = updateLocations; } public List<String> getBadRecords() { return badRecords; } public void setBadRecords(List<String> badRecords) { this.badRecords = badRecords; } public Map<String, String> getUpdateLocations() { return updateLocations; } public void setUpdateLocations(Map<String, String> updateLocations) { this.updateLocations = updateLocations; } } /** The following APIs * * - updateMDatabaseURI * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. 
* */ public UpdateMDatabaseURIRetVal updateMDatabaseURI(URI oldLoc, URI newLoc, boolean dryRun) { boolean committed = false; Query query = null; Map<String, String> updateLocations = new HashMap<>(); List<String> badRecords = new ArrayList<>(); UpdateMDatabaseURIRetVal retVal = null; try { openTransaction(); query = pm.newQuery(MDatabase.class); List<MDatabase> mDBs = (List<MDatabase>) query.execute(); pm.retrieveAll(mDBs); for (MDatabase mDB : mDBs) { URI locationURI = null; String location = mDB.getLocationUri(); try { locationURI = new Path(location).toUri(); } catch (IllegalArgumentException e) { badRecords.add(location); } if (locationURI == null) { badRecords.add(location); } else { if (shouldUpdateURI(locationURI, oldLoc)) { String dbLoc = mDB.getLocationUri().replaceAll(oldLoc.toString(), newLoc.toString()); updateLocations.put(locationURI.toString(), dbLoc); if (!dryRun) { mDB.setLocationUri(dbLoc); } } } // if managed location is set, perform location update for managed location URI as well if (org.apache.commons.lang3.StringUtils.isNotBlank(mDB.getManagedLocationUri())) { URI managedLocationURI = null; String managedLocation = mDB.getManagedLocationUri(); try { managedLocationURI = new Path(managedLocation).toUri(); } catch (IllegalArgumentException e) { badRecords.add(managedLocation); } if (managedLocationURI == null) { badRecords.add(managedLocation); } else { if (shouldUpdateURI(managedLocationURI, oldLoc)) { String dbLoc = mDB.getManagedLocationUri().replaceAll(oldLoc.toString(), newLoc.toString()); updateLocations.put(managedLocationURI.toString(), dbLoc); if (!dryRun) { mDB.setManagedLocationUri(dbLoc); } } } } } committed = commitTransaction(); if (committed) { retVal = new UpdateMDatabaseURIRetVal(badRecords, updateLocations); } return retVal; } finally { rollbackAndCleanup(committed, query); } } public class UpdatePropURIRetVal { private List<String> badRecords; private Map<String, String> updateLocations; UpdatePropURIRetVal(List<String> 
badRecords, Map<String, String> updateLocations) { this.badRecords = badRecords; this.updateLocations = updateLocations; } public List<String> getBadRecords() { return badRecords; } public void setBadRecords(List<String> badRecords) { this.badRecords = badRecords; } public Map<String, String> getUpdateLocations() { return updateLocations; } public void setUpdateLocations(Map<String, String> updateLocations) { this.updateLocations = updateLocations; } } private void updatePropURIHelper(URI oldLoc, URI newLoc, String tblPropKey, boolean isDryRun, List<String> badRecords, Map<String, String> updateLocations, Map<String, String> parameters) { URI tablePropLocationURI = null; if (parameters.containsKey(tblPropKey)) { String tablePropLocation = parameters.get(tblPropKey); try { tablePropLocationURI = new Path(tablePropLocation).toUri(); } catch (IllegalArgumentException e) { badRecords.add(tablePropLocation); } // if tablePropKey that was passed in lead to a valid URI resolution, update it if //parts of it match the old-NN-loc, else add to badRecords if (tablePropLocationURI == null) { badRecords.add(tablePropLocation); } else { if (shouldUpdateURI(tablePropLocationURI, oldLoc)) { String tblPropLoc = parameters.get(tblPropKey).replaceAll(oldLoc.toString(), newLoc .toString()); updateLocations.put(tablePropLocationURI.toString(), tblPropLoc); if (!isDryRun) { parameters.put(tblPropKey, tblPropLoc); } } } } } /** The following APIs * * - updateMStorageDescriptorTblPropURI * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. 
* */ public UpdatePropURIRetVal updateTblPropURI(URI oldLoc, URI newLoc, String tblPropKey, boolean isDryRun) { boolean committed = false; Query query = null; Map<String, String> updateLocations = new HashMap<>(); List<String> badRecords = new ArrayList<>(); UpdatePropURIRetVal retVal = null; try { openTransaction(); query = pm.newQuery(MTable.class); List<MTable> mTbls = (List<MTable>) query.execute(); pm.retrieveAll(mTbls); for (MTable mTbl : mTbls) { updatePropURIHelper(oldLoc, newLoc, tblPropKey, isDryRun, badRecords, updateLocations, mTbl.getParameters()); } committed = commitTransaction(); if (committed) { retVal = new UpdatePropURIRetVal(badRecords, updateLocations); } return retVal; } finally { rollbackAndCleanup(committed, query); } } /** The following APIs * * - updateMStorageDescriptorTblPropURI * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. * */ @Deprecated public UpdatePropURIRetVal updateMStorageDescriptorTblPropURI(URI oldLoc, URI newLoc, String tblPropKey, boolean isDryRun) { boolean committed = false; Query query = null; Map<String, String> updateLocations = new HashMap<>(); List<String> badRecords = new ArrayList<>(); UpdatePropURIRetVal retVal = null; try { openTransaction(); query = pm.newQuery(MStorageDescriptor.class); List<MStorageDescriptor> mSDSs = (List<MStorageDescriptor>) query.execute(); pm.retrieveAll(mSDSs); for (MStorageDescriptor mSDS : mSDSs) { updatePropURIHelper(oldLoc, newLoc, tblPropKey, isDryRun, badRecords, updateLocations, mSDS.getParameters()); } committed = commitTransaction(); if (committed) { retVal = new UpdatePropURIRetVal(badRecords, updateLocations); } return retVal; } finally { rollbackAndCleanup(committed, query); } } public class UpdateMStorageDescriptorTblURIRetVal { private List<String> badRecords; private Map<String, String> updateLocations; private int numNullRecords; UpdateMStorageDescriptorTblURIRetVal(List<String> badRecords, Map<String, String> updateLocations, int 
numNullRecords) { this.badRecords = badRecords; this.updateLocations = updateLocations; this.numNullRecords = numNullRecords; } public List<String> getBadRecords() { return badRecords; } public void setBadRecords(List<String> badRecords) { this.badRecords = badRecords; } public Map<String, String> getUpdateLocations() { return updateLocations; } public void setUpdateLocations(Map<String, String> updateLocations) { this.updateLocations = updateLocations; } public int getNumNullRecords() { return numNullRecords; } public void setNumNullRecords(int numNullRecords) { this.numNullRecords = numNullRecords; } } /** The following APIs * * - updateMStorageDescriptorTblURI * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. * */ public UpdateMStorageDescriptorTblURIRetVal updateMStorageDescriptorTblURI(URI oldLoc, URI newLoc, boolean isDryRun) { boolean committed = false; Query query = null; Map<String, String> updateLocations = new HashMap<>(); List<String> badRecords = new ArrayList<>(); int numNullRecords = 0; UpdateMStorageDescriptorTblURIRetVal retVal = null; try { openTransaction(); query = pm.newQuery(MStorageDescriptor.class); List<MStorageDescriptor> mSDSs = (List<MStorageDescriptor>) query.execute(); pm.retrieveAll(mSDSs); for (MStorageDescriptor mSDS : mSDSs) { URI locationURI = null; String location = mSDS.getLocation(); if (location == null) { // This can happen for View or Index numNullRecords++; continue; } try { locationURI = new Path(location).toUri(); } catch (IllegalArgumentException e) { badRecords.add(location); } if (locationURI == null) { badRecords.add(location); } else { if (shouldUpdateURI(locationURI, oldLoc)) { String tblLoc = mSDS.getLocation().replaceAll(oldLoc.toString(), newLoc.toString()); updateLocations.put(locationURI.toString(), tblLoc); if (!isDryRun) { mSDS.setLocation(tblLoc); } } } } committed = commitTransaction(); if (committed) { retVal = new UpdateMStorageDescriptorTblURIRetVal(badRecords, updateLocations, 
numNullRecords); } return retVal; } finally { rollbackAndCleanup(committed, query); } } public class UpdateSerdeURIRetVal { private List<String> badRecords; private Map<String, String> updateLocations; UpdateSerdeURIRetVal(List<String> badRecords, Map<String, String> updateLocations) { this.badRecords = badRecords; this.updateLocations = updateLocations; } public List<String> getBadRecords() { return badRecords; } public void setBadRecords(List<String> badRecords) { this.badRecords = badRecords; } public Map<String, String> getUpdateLocations() { return updateLocations; } public void setUpdateLocations(Map<String, String> updateLocations) { this.updateLocations = updateLocations; } } /** The following APIs * * - updateSerdeURI * * is used by HiveMetaTool. This API **shouldn't** be exposed via Thrift. * */ public UpdateSerdeURIRetVal updateSerdeURI(URI oldLoc, URI newLoc, String serdeProp, boolean isDryRun) { boolean committed = false; Query query = null; Map<String, String> updateLocations = new HashMap<>(); List<String> badRecords = new ArrayList<>(); UpdateSerdeURIRetVal retVal = null; try { openTransaction(); query = pm.newQuery(MSerDeInfo.class); List<MSerDeInfo> mSerdes = (List<MSerDeInfo>) query.execute(); pm.retrieveAll(mSerdes); for (MSerDeInfo mSerde : mSerdes) { if (mSerde.getParameters().containsKey(serdeProp)) { String schemaLoc = mSerde.getParameters().get(serdeProp); URI schemaLocURI = null; try { schemaLocURI = new Path(schemaLoc).toUri(); } catch (IllegalArgumentException e) { badRecords.add(schemaLoc); } if (schemaLocURI == null) { badRecords.add(schemaLoc); } else { if (shouldUpdateURI(schemaLocURI, oldLoc)) { String newSchemaLoc = schemaLoc.replaceAll(oldLoc.toString(), newLoc.toString()); updateLocations.put(schemaLocURI.toString(), newSchemaLoc); if (!isDryRun) { mSerde.getParameters().put(serdeProp, newSchemaLoc); } } } } } committed = commitTransaction(); if (committed) { retVal = new UpdateSerdeURIRetVal(badRecords, updateLocations); } 
return retVal; } finally { rollbackAndCleanup(committed, query); } } /** * Using resultSet to read the HMS_SUMMARY table. * @param catalogFilter the optional catalog name filter * @param dbFilter the optional database name filter * @param tableFilter the optional table name filter * @return MetadataSummary * @throws MetaException */ public List<MetadataTableSummary> getMetadataSummary(String catalogFilter, String dbFilter, String tableFilter) throws MetaException { Set<Long> partedTabs = new HashSet<>(); Set<Long> nonPartedTabs = new HashSet<>(); Map<Long, MetadataTableSummary> summaries = new HashMap<>(); List<MetadataTableSummary> metadataTableSummaryList = new ArrayList<>(); StringBuilder filter = new StringBuilder(); List<String> parameterVals = new ArrayList<>(); if (!StringUtils.isEmpty(dbFilter)) { appendPatternCondition(filter, "database.name", dbFilter, parameterVals); } if (!StringUtils.isEmpty(tableFilter)) { appendPatternCondition(filter, "tableName", tableFilter, parameterVals); } try (QueryWrapper query = new QueryWrapper(filter.length() > 0 ? 
pm.newQuery(MTable.class, filter.toString()) : pm.newQuery(MTable.class))){ query.setResult("id, database.catalogName, database.name, tableName, owner, tableType"); List<Object[]> tables = (List<Object[]>) query.executeWithArray(parameterVals.toArray(new String[0])); for (Object[] table : tables) { Deadline.checkTimeout(); long tableId = Long.parseLong(table[0].toString()); MetadataTableSummary summary = new MetadataTableSummary(String.valueOf(table[1]), String.valueOf(table[2]), String.valueOf(table[3]), String.valueOf(table[4])); summary.setTableType(String.valueOf(table[5])); summary.setTableId(tableId); summaries.put(tableId, summary); metadataTableSummaryList.add(summary); } } collectColumnSummary(summaries); collectTabFormatSummary(summaries); collectPartitionSummary(summaries, partedTabs, nonPartedTabs); collectBasicStats(summaries, nonPartedTabs, partedTabs); return metadataTableSummaryList; } private void collectPartitionSummary(Map<Long, MetadataTableSummary> summaries, Set<Long> partedTabs, Set<Long> nonPartedTabs) throws MetaException { String queryText0 = "select \"TBL_ID\", count(1) from \"PARTITION_KEYS\" where \"TBL_ID\" in ("; runBatched(batchSize, new ArrayList<>(summaries.keySet()), new Batchable<Long, Void>() { @Override public List<Void> run(List<Long> input) throws Exception { Pair<Query<?>, List<Object[]>> qResult = getResultFromInput(input, queryText0, " group by \"TBL_ID\""); try { List<Object[]> result = qResult.getRight(); if (result != null) { for (Object[] fields : result) { Deadline.checkTimeout(); Long tabId = Long.parseLong(String.valueOf(fields[0])); MetadataTableSummary summary = summaries.get(tabId); summary.setPartitionColumnCount(Integer.parseInt(fields[1].toString())); partedTabs.add(tabId); } } summaries.keySet().stream().filter(k -> !partedTabs.contains(k)) .forEach(nonPartedTabs::add); } finally { qResult.getLeft().closeAll(); } return Collections.emptyList(); } }); String queryText1 = "select \"TBL_ID\", count(1) from 
\"PARTITIONS\" where \"TBL_ID\" in ("; runBatched(batchSize, new ArrayList<>(partedTabs), new Batchable<Long, Void>() { @Override public List<Void> run(List<Long> input) throws Exception { Pair<Query<?>, List<Object[]>> qResult = getResultFromInput(input, queryText1, " group by \"TBL_ID\""); try { List<Object[]> result = qResult.getRight(); if (result != null) { for (Object[] fields : result) { Deadline.checkTimeout(); Long tabId = Long.parseLong(String.valueOf(fields[0])); MetadataTableSummary summary = summaries.get(tabId); summary.setPartitionCount(Integer.parseInt(fields[1].toString())); } } } finally { qResult.getLeft().closeAll(); } return Collections.emptyList(); } }); } private void collectColumnSummary(Map<Long, MetadataTableSummary> summaries) throws MetaException { String queryText0 = "select \"TBL_ID\", count(*), sum(CASE WHEN \"TYPE_NAME\" like 'array%' THEN 1 ELSE 0 END)," + " sum(CASE WHEN \"TYPE_NAME\" like 'struct%' THEN 1 ELSE 0 END), sum(CASE WHEN \"TYPE_NAME\" like 'map%' THEN 1 ELSE 0 END)" + " from \"TBLS\" t join \"SDS\" s on t.\"SD_ID\" = s.\"SD_ID\" join \"CDS\" c on s.\"CD_ID\" = c.\"CD_ID\" join \"COLUMNS_V2\" v on c.\"CD_ID\" = v.\"CD_ID\"" + " where \"TBL_ID\" in ("; runBatched(batchSize, new ArrayList<>(summaries.keySet()), new Batchable<Long, Void>() { @Override public List<Void> run(List<Long> input) throws Exception { Pair<Query<?>, List<Object[]>> qResult = getResultFromInput(input, queryText0, " group by \"TBL_ID\""); try { List<Object[]> result = qResult.getRight(); if (result != null) { for (Object[] fields : result) { Deadline.checkTimeout(); Long tabId = Long.parseLong(String.valueOf(fields[0])); MetadataTableSummary summary = summaries.get(tabId); summary.columnSummary(Integer.parseInt(fields[1].toString()), Integer.parseInt(fields[2].toString()), Integer.parseInt(fields[3].toString()), Integer.parseInt(fields[4].toString())); } } } finally { qResult.getLeft().closeAll(); } return Collections.emptyList(); } }); } private void 
collectTabFormatSummary(Map<Long, MetadataTableSummary> summaries) throws MetaException { String queryText0 = "select t.\"TBL_ID\", d.\"SLIB\", s.\"IS_COMPRESSED\" from \"TBLS\" t left join \"SDS\" s on t.\"SD_ID\" = s.\"SD_ID\" left join \"SERDES\" d on d.\"SERDE_ID\" = s.\"SERDE_ID\"" + " where t.\"TBL_ID\" in ("; String queryText1 = "select p.\"TBL_ID\", " + dbType.toVarChar("p.\"PARAM_VALUE\"") + " from \"TABLE_PARAMS\" p " + " where p.\"PARAM_KEY\" = 'transactional_properties' and p.\"TBL_ID\" in ("; List<Long> transactionTables = new ArrayList<>(); runBatched(batchSize, new ArrayList<>(summaries.keySet()), new Batchable<Long, Void>() { @Override public List<Void> run(List<Long> input) throws Exception { Pair<Query<?>, List<Object[]>> qResult = getResultFromInput(input, queryText0, ""); try { List<Object[]> result = qResult.getRight(); if (result != null) { for (Object[] fields : result) { Deadline.checkTimeout(); Long tabId = Long.parseLong(String.valueOf(fields[0])); MetadataTableSummary summary = summaries.get(tabId); String lib = String.valueOf(fields[1]); String compressionType = String.valueOf(fields[2]); collectTabFormatSummary(transactionTables, tabId, summary, lib, compressionType); } } } finally { qResult.getLeft().closeAll(); } return Collections.emptyList(); } }); runBatched(batchSize, transactionTables, new Batchable<Long, Void>() { @Override public List<Void> run(List<Long> input) throws Exception { Pair<Query<?>, List<Object[]>> qResult = getResultFromInput(input, queryText1, ""); try { List<Object[]> result = qResult.getRight(); if (result != null) { for (Object[] fields : result) { Deadline.checkTimeout(); Long tabId = Long.parseLong(String.valueOf(fields[0])); MetadataTableSummary summary = summaries.get(tabId); String transactionalProperties = String.valueOf(fields[1]); if("insert_only".equalsIgnoreCase(transactionalProperties.trim())) { summary.setTableType("HIVE_ACID_INSERT_ONLY"); } } } } finally { qResult.getLeft().closeAll(); } return 
Collections.emptyList(); } }); } private Pair<Query<?>, List<Object[]>> getResultFromInput(List<Long> input, String queryText0, String subQ) throws MetaException { int size = input.size(); String queryText = queryText0 + (size == 0 ? "" : repeat(",?", size).substring(1)) + ") " + subQ; if (dbType.isMYSQL()) { queryText = queryText.replace("\"", ""); } Object[] params = new Object[size]; for (int i = 0; i < input.size(); ++i) { params[i] = input.get(i); } Deadline.checkTimeout(); Query<?> query = pm.newQuery("javax.jdo.query.SQL", queryText); List<Object[]> result = (List<Object[]>) query.executeWithArray(params); return Pair.of(query, result); } private void collectTabFormatSummary(List<Long> transactionalTables, Long tableId, MetadataTableSummary summary, String slib, String compressionType) { String tblType = summary.getTableType(); String fileType = TableFormat.extractFileFormat(slib); Set<String> nonNativeTabTypes = TableFormat.getNonNativeFormats(); if (nonNativeTabTypes.contains(fileType)) { tblType = fileType.toUpperCase(); } else if (TableType.MANAGED_TABLE.name().equalsIgnoreCase(tblType)) { tblType = "HIVE_ACID_FULL"; transactionalTables.add(tableId); } else if (TableType.EXTERNAL_TABLE.name().equalsIgnoreCase(tblType)) { tblType = "HIVE_EXTERNAL"; } else { tblType = tblType != null ? 
tblType.toUpperCase() : "NULL"; } if (compressionType.equals("0") || compressionType.equals("f")) { compressionType = "None"; } summary.tableFormatSummary(tblType, compressionType, fileType); } private void collectBasicStats(Map<Long, MetadataTableSummary> summaries, Set<Long> nonPartedTabs, Set<Long> partedTabs) throws MetaException { String queryText0 = "select \"TBL_ID\", \"PARAM_KEY\", CAST(" + dbType.toVarChar("\"PARAM_VALUE\"") + " AS decimal(21,0)) from \"TABLE_PARAMS\" where \"PARAM_KEY\" " + "in ('" + StatsSetupConst.TOTAL_SIZE + "', '" + StatsSetupConst.NUM_FILES + "', '" + StatsSetupConst.ROW_COUNT + "') and \"TBL_ID\" in ("; runBatched(batchSize, new ArrayList<>(nonPartedTabs), new Batchable<Long, Void>() { @Override public List<Void> run(List<Long> input) throws Exception { collectBasicStats(queryText0, input, summaries, ""); return Collections.emptyList(); } }); String queryText1 = "select \"TBL_ID\", \"PARAM_KEY\", sum(CAST(" + dbType.toVarChar("\"PARAM_VALUE\"") + " AS decimal(21,0))) from \"PARTITIONS\" t " + "join \"PARTITION_PARAMS\" p on p.\"PART_ID\" = t.\"PART_ID\" where \"PARAM_KEY\" " + "in ('" + StatsSetupConst.TOTAL_SIZE + "', '" + StatsSetupConst.NUM_FILES + "', '" + StatsSetupConst.ROW_COUNT + "') and t.\"TBL_ID\" in ("; runBatched(batchSize, new ArrayList<>(partedTabs), new Batchable<Long, Void>() { @Override public List<Void> run(List<Long> input) throws Exception { collectBasicStats(queryText1, input, summaries, " group by \"TBL_ID\", \"PARAM_KEY\""); return Collections.emptyList(); } }); } private void collectBasicStats(String queryText0, List<Long> input, Map<Long, MetadataTableSummary> summaries, String subQ) throws MetaException { Pair<Query<?>, List<Object[]>> qResult = getResultFromInput(input, queryText0, subQ); try { List<Object[]> result = qResult.getRight(); if (result != null) { for (Object[] fields : result) { Deadline.checkTimeout(); Long tabId = Long.parseLong(String.valueOf(fields[0])); MetadataTableSummary summary = 
summaries.get(tabId); feedBasicStats(summary, String.valueOf(fields[1]), fields[2]); } } } finally { qResult.getLeft().closeAll(); } } private void feedBasicStats(MetadataTableSummary summary, String key, Object value) { if (summary == null || value == null || !org.apache.commons.lang3.StringUtils.isNumeric(value.toString())) { return; } long val = Long.parseLong(value.toString()); switch (key) { case StatsSetupConst.TOTAL_SIZE: summary.setTotalSize(val); break; case StatsSetupConst.ROW_COUNT: summary.setNumRows(val); break; case StatsSetupConst.NUM_FILES: summary.setNumFiles(val); break; default: throw new AssertionError("This should never happen!"); } } public Set<Long> filterTablesForSummary(List<MetadataTableSummary> tableSummaries, Integer lastUpdatedDays, Integer tablesLimit) throws MetaException { if (tableSummaries == null || tableSummaries.isEmpty() || (tablesLimit != null && tablesLimit == 0)) { return Collections.emptySet(); } Set<Long> tableIds = tableSummaries.stream().map(MetadataTableSummary::getTableId).collect(Collectors.toSet()); if (lastUpdatedDays == null && (tablesLimit == null || tableIds.size() < tablesLimit)) { return tableIds; } String tableType = tableSummaries.get(0).getTableType(); if (!TableFormat.isIcebergFormat(tableType)) { // we don't support filtering this type yet, ignore... LOG.warn("This table type: {} hasn't been supported selecting the summary yet, ignore...", tableType); return tableIds; } Deadline.checkTimeout(); List<Long> tables = Batchable.runBatched(batchSize, new ArrayList<>(tableIds), new Batchable<Long, Long>() { @Override public List<Long> run(List<Long> input) throws Exception { int size = input.size(); String queryText = "\"TBL_ID\" from \"TABLE_PARAMS\" where \"PARAM_KEY\" = 'current-snapshot-timestamp-ms' " + (lastUpdatedDays != null ? 
(" and CAST(" + dbType.toVarChar("\"PARAM_VALUE\"") + " AS decimal(21,0)) > " + (System.currentTimeMillis() - lastUpdatedDays * 24 * 3600000L)) : "") + " and \"TBL_ID\" in (" + (size == 0 ? "" : repeat(",?", size).substring(1)) + ") " + " order by CAST(" + dbType.toVarChar("\"PARAM_VALUE\"") + " AS decimal(21,0)) DESC"; if (tablesLimit != null && tablesLimit >= 0) { queryText = sqlGenerator.addLimitClause(tablesLimit, queryText); } else { queryText = "select " + queryText; } if (dbType.isMYSQL()) { queryText = queryText.replace("\"", ""); } Object[] params = new Object[size]; for (int i = 0; i < input.size(); ++i) { params[i] = input.get(i); } Deadline.checkTimeout(); Query<?> query = pm.newQuery("javax.jdo.query.SQL", queryText); List<Long> ids = new ArrayList<>(); try { List<Object> result = (List<Object>) query.executeWithArray(params); if (result != null) { for (Object fields : result) { ids.add(Long.parseLong(fields.toString())); } } } finally { query.closeAll(); } return ids; } }); return new HashSet<>(tables); } }
apache/xmlgraphics-batik
35,778
batik-dom/src/main/java/org/apache/batik/dom/util/DOMUtilities.java
/*
   Licensed to the Apache Software Foundation (ASF) under one or more
   contributor license agreements.  See the NOTICE file distributed with
   this work for additional information regarding copyright ownership.
   The ASF licenses this file to You under the Apache License, Version 2.0
   (the "License"); you may not use this file except in compliance with
   the License.  You may obtain a copy of the License at

       http://www.apache.org/licenses/LICENSE-2.0

   Unless required by applicable law or agreed to in writing, software
   distributed under the License is distributed on an "AS IS" BASIS,
   WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
   See the License for the specific language governing permissions and
   limitations under the License.
 */
package org.apache.batik.dom.util;

import java.io.IOException;
import java.io.StringReader;
import java.io.StringWriter;
import java.io.Writer;
import java.util.ArrayList;
import java.util.HashMap;
import java.util.Map;

import org.apache.batik.dom.AbstractDocument;
import org.apache.batik.constants.XMLConstants;
import org.apache.batik.xml.XMLUtilities;
import org.w3c.dom.Attr;
import org.w3c.dom.DOMException;
import org.w3c.dom.DOMImplementation;
import org.w3c.dom.Document;
import org.w3c.dom.DocumentType;
import org.w3c.dom.Element;
import org.w3c.dom.NamedNodeMap;
import org.w3c.dom.Node;
import org.w3c.dom.NodeList;

/**
 * A collection of utility functions for the DOM: XML serialization,
 * ancestry/containment checks, fragment parsing and document cloning.
 *
 * @author <a href="mailto:stephane@hillion.org">Stephane Hillion</a>
 * @version $Id$
 */
public class DOMUtilities extends XMLUtilities implements XMLConstants {

    /**
     * Does not need to be instantiated.
     */
    protected DOMUtilities() {
    }

    /**
     * A node in an immutable linked list of prefix to namespace name
     * mappings. Declaring a prefix returns a NEW list head linking back
     * to the old one, so callers can pass an extended map into child
     * scopes without mutating the parent scope's map.
     */
    private static final class NSMap {

        /**
         * The prefix to map.
         */
        private String prefix;

        /**
         * The namespace name that the prefix maps to.
         * The empty string is used to represent no namespace.
         */
        private String ns;

        /**
         * The next namespace prefix mapping in the list.
         */
        private NSMap next;

        /**
         * The next number to use when generating prefixes.
         * A prefix of the form <code>"a" + number</code> is generated when
         * serializing a node whose namespace URI does not correspond to
         * a prefix in scope.
         */
        private int nextPrefixNumber;

        /**
         * Constructs a new namespace prefix mapping object with the
         * XML and XMLNS namespaces predeclared.
         */
        public static NSMap create() {
            // The bare new NSMap() acts as the list's sentinel tail;
            // traversal loops below stop when m.next == null.
            return new NSMap().declare(XMLConstants.XML_PREFIX,
                                       XMLConstants.XML_NAMESPACE_URI)
                              .declare(XMLConstants.XMLNS_PREFIX,
                                       XMLConstants.XMLNS_NAMESPACE_URI);
        }

        /**
         * Creates a new <code>NSMap</code> object.
         */
        private NSMap() {
        }

        /**
         * Declares a new prefix mapping by returning a new
         * <code>NSMap</code> object that links to this one.
         */
        public NSMap declare(String prefix, String ns) {
            NSMap m = new NSMap();
            m.prefix = prefix;
            m.ns = ns;
            m.next = this;
            // Carry the prefix-generation counter forward so generated
            // prefixes stay unique along this chain.
            m.nextPrefixNumber = this.nextPrefixNumber;
            return m;
        }

        /**
         * Returns a new, generated namespace prefix ("a0", "a1", ...),
         * skipping any candidate already declared in scope.
         */
        public String getNewPrefix() {
            String prefix;
            do {
                prefix = "a" + nextPrefixNumber++;
            } while (getNamespace(prefix) != null);
            return prefix;
        }

        /**
         * Returns the namespace URI that the specified prefix
         * maps to, or <code>null</code> if the prefix has not
         * been declared. The most recent declaration wins, since
         * the list is walked from newest to oldest.
         */
        public String getNamespace(String prefix) {
            // m.next != null excludes the sentinel tail node.
            for (NSMap m = this; m.next != null; m = m.next) {
                if (m.prefix.equals(prefix)) {
                    return m.ns;
                }
            }
            return null;
        }

        /**
         * Returns the prefix appropriate for an element that maps to specified
         * namespace URI. If the specified namespace is the default namespace
         * (i.e., it has an empty string prefix mapping to it), then the empty
         * string is returned. If there is no appropriate prefix,
         * <code>null</code> is returned.
         */
        public String getPrefixForElement(String ns) {
            for (NSMap m = this; m.next != null; m = m.next) {
                if (ns.equals(m.ns)) {
                    return m.prefix;
                }
            }
            return null;
        }

        /**
         * Returns the prefix appropriate for an attribute that maps to
         * specified namespace URI. If there is no appropriate prefix,
         * <code>null</code> is returned. Unlike elements, attributes can
         * never use the default (empty) prefix, so it is skipped.
         */
        public String getPrefixForAttr(String ns) {
            for (NSMap m = this; m.next != null; m = m.next) {
                if (ns.equals(m.ns) && !m.prefix.equals("")) {
                    return m.prefix;
                }
            }
            return null;
        }
    }

    /**
     * Serializes the specified <code>Document</code>, writing it to the given
     * <code>Writer</code>.
     *
     * @throws IOException if the document has no document element or a node
     *         cannot be serialized
     */
    public static void writeDocument(Document doc, Writer w)
            throws IOException {
        AbstractDocument d = (AbstractDocument) doc;
        if (doc.getDocumentElement() == null) {
            throw new IOException("No document element");
        }
        NSMap m = NSMap.create();
        for (Node n = doc.getFirstChild();
                n != null;
                n = n.getNextSibling()) {
            // XML 1.1 documents allow a wider character repertoire; the
            // flag is threaded through to contentToString and friends.
            writeNode(n, w, m, "1.1".equals(d.getXmlVersion()));
        }
    }

    /**
     * Serializes one node (recursively for elements) using the namespace
     * scope <code>m</code>. Throws IOException for content that cannot be
     * represented in XML (bad CDATA/comment/PI data, invalid characters).
     */
    protected static void writeNode(Node n, Writer w, NSMap m,
                                    boolean isXML11) throws IOException {
        switch (n.getNodeType()) {
        case Node.ELEMENT_NODE: {
            // First pass over the attributes: record any xmlns/xmlns:foo
            // declarations so they are in scope before the tag name and the
            // remaining attributes are resolved.
            if (n.hasAttributes()) {
                NamedNodeMap attr = n.getAttributes();
                int len = attr.getLength();
                for (int i = 0; i < len; i++) {
                    Attr a = (Attr)attr.item(i);
                    String name = a.getNodeName();
                    if (name.startsWith("xmlns")) {
                        if (name.length() == 5) {
                            // plain "xmlns" declares the default namespace
                            m = m.declare("", a.getNodeValue());
                        } else {
                            // "xmlns:foo" — skip the 6-char "xmlns:" header
                            String prefix = name.substring(6);
                            m = m.declare(prefix, a.getNodeValue());
                        }
                    }
                }
            }
            w.write('<');
            String ns = n.getNamespaceURI();
            String tagName;
            if (ns == null) {
                tagName = n.getNodeName();
                w.write(tagName);
                // If a default namespace is in scope, it must be undeclared
                // for this no-namespace element.
                if (!"".equals(m.getNamespace(""))) {
                    w.write(" xmlns=\"\"");
                    m = m.declare("", "");
                }
            } else {
                String prefix = n.getPrefix();
                if (prefix == null) {
                    prefix = "";
                }
                if (ns.equals(m.getNamespace(prefix))) {
                    // The node's own prefix already maps to the right URI.
                    tagName = n.getNodeName();
                    w.write(tagName);
                } else {
                    prefix = m.getPrefixForElement(ns);
                    if (prefix == null) {
                        // No in-scope prefix for this URI: generate one and
                        // emit its declaration inline.
                        prefix = m.getNewPrefix();
                        tagName = prefix + ':' + n.getLocalName();
                        w.write(tagName + " xmlns:" + prefix + "=\""
                                + contentToString(ns, isXML11) + '"');
                        m = m.declare(prefix, ns);
                    } else {
                        if (prefix.equals("")) {
                            tagName = n.getLocalName();
                        } else {
                            tagName = prefix + ':' + n.getLocalName();
                        }
                        w.write(tagName);
                    }
                }
            }
            // Second pass: emit all attributes, fixing up prefixes for
            // namespaced attributes (xmlns declarations are emitted as-is).
            if (n.hasAttributes()) {
                NamedNodeMap attr = n.getAttributes();
                int len = attr.getLength();
                for (int i = 0; i < len; i++) {
                    Attr a = (Attr)attr.item(i);
                    String name = a.getNodeName();
                    String prefix = a.getPrefix();
                    String ans = a.getNamespaceURI();
                    if (ans != null
                            && !("xmlns".equals(prefix)
                                    || name.equals("xmlns"))) {
                        if (prefix != null
                                && !ans.equals(m.getNamespace(prefix))
                                || prefix == null) {
                            prefix = m.getPrefixForAttr(ans);
                            if (prefix == null) {
                                prefix = m.getNewPrefix();
                                m = m.declare(prefix, ans);
                                w.write(" xmlns:" + prefix + "=\""
                                        + contentToString(ans, isXML11)
                                        + '"');
                            }
                            name = prefix + ':' + a.getLocalName();
                        }
                    }
                    w.write(' ' + name + "=\""
                            + contentToString(a.getNodeValue(), isXML11)
                            + '"');
                }
            }
            Node c = n.getFirstChild();
            if (c != null) {
                w.write('>');
                do {
                    // Children see the extended namespace map m.
                    writeNode(c, w, m, isXML11);
                    c = c.getNextSibling();
                } while (c != null);
                w.write("</" + tagName + '>');
            } else {
                w.write("/>");
            }
            break;
        }
        case Node.TEXT_NODE:
            w.write(contentToString(n.getNodeValue(), isXML11));
            break;
        case Node.CDATA_SECTION_NODE: {
            String data = n.getNodeValue();
            // "]]>" cannot be escaped inside a CDATA section.
            if (data.indexOf("]]>") != -1) {
                throw new IOException("Unserializable CDATA section node");
            }
            w.write("<![CDATA["
                    + assertValidCharacters(data, isXML11)
                    + "]]>");
            break;
        }
        case Node.ENTITY_REFERENCE_NODE:
            w.write('&' + n.getNodeName() + ';');
            break;
        case Node.PROCESSING_INSTRUCTION_NODE: {
            String target = n.getNodeName();
            String data = n.getNodeValue();
            // "xml" targets, colons in the target and "?>" in the data are
            // all unrepresentable in a PI.
            if (target.equalsIgnoreCase("xml")
                    || target.indexOf(':') != -1
                    || data.indexOf("?>") != -1) {
                throw new IOException
                    ("Unserializable processing instruction node");
            }
            w.write("<?" + target + ' ' + data + "?>");
            break;
        }
        case Node.COMMENT_NODE: {
            w.write("<!--");
            String data = n.getNodeValue();
            int len = data.length();
            // A comment may not contain "--" nor end with '-'.
            if (len != 0 && data.charAt(len - 1) == '-'
                    || data.indexOf("--") != -1) {
                throw new IOException("Unserializable comment node");
            }
            w.write(data);
            w.write("-->");
            break;
        }
        case Node.DOCUMENT_TYPE_NODE: {
            DocumentType dt = (DocumentType)n;
            w.write("<!DOCTYPE "
                    + n.getOwnerDocument().getDocumentElement()
                        .getNodeName());
            String pubID = dt.getPublicId();
            if (pubID != null) {
                // Pick a quote character not used inside the ID; 0 means
                // both quote kinds occur and the ID is unserializable.
                char q = getUsableQuote(pubID);
                if (q == 0) {
                    throw new IOException("Unserializable DOCTYPE node");
                }
                w.write(" PUBLIC " + q + pubID + q);
            }
            String sysID = dt.getSystemId();
            if (sysID != null) {
                char q = getUsableQuote(sysID);
                if (q == 0) {
                    throw new IOException("Unserializable DOCTYPE node");
                }
                if (pubID == null) {
                    w.write(" SYSTEM");
                }
                w.write(" " + q + sysID + q);
            }
            String subset = dt.getInternalSubset();
            if (subset != null) {
                w.write('[' + subset + ']');
            }
            w.write('>');
            break;
        }
        default:
            throw new IOException("Unknown DOM node type " + n.getNodeType());
        }
    }

    /**
     * Writes a node using the given writer. Documents are delegated to
     * {@link #writeDocument}; other nodes are serialized with a fresh
     * namespace scope.
     */
    public static void writeNode(Node n, Writer w) throws IOException {
        if (n.getNodeType() == Node.DOCUMENT_NODE) {
            writeDocument((Document) n, w);
        } else {
            AbstractDocument d = (AbstractDocument) n.getOwnerDocument();
            writeNode(n, w, NSMap.create(),
                      d == null ? false : "1.1".equals(d.getXmlVersion()));
        }
    }

    /**
     * Returns the quote character to use when quoting the specified string.
     * If the string contains both single and double quotes, then 0 will be
     * returned.
     */
    private static char getUsableQuote(String s) {
        char ret = 0;
        int i = s.length() - 1;
        while (i >= 0) {
            char c = s.charAt(i);
            if (c == '"') {
                if (ret == 0) {
                    ret = '\'';
                } else {
                    return 0;
                }
            } else if (c == '\'') {
                if (ret == 0) {
                    ret = '"';
                } else {
                    return 0;
                }
            }
            i--;
        }
        // Neither quote occurs in s: default to a double quote.
        return ret == 0 ? '"' : ret;
    }

    /**
     * Serializes the given DOM node using {@link #writeNode(Node,Writer)}
     * and returns the XML as a String.
     *
     * @param n The Node to serialize.
     * @return A String containing the XML serialization of the Node, or an
     *         empty String if there was a problem during serialization.
     */
    public static String getXML(Node n) {
        Writer writer = new StringWriter();
        try {
            DOMUtilities.writeNode(n, writer);
            writer.close();
        } catch (IOException ex) {
            // Serialization failure is signalled with "" by contract.
            return "";
        }
        return writer.toString();
    }

    /**
     * Returns <code>s</code> unchanged, or throws IOException if it
     * contains a character invalid for the target XML version.
     */
    protected static String assertValidCharacters(String s, boolean isXML11)
            throws IOException {
        int len = s.length();
        for (int i = 0; i < len; i++) {
            char c = s.charAt(i);
            if (!isXML11 && !isXMLCharacter(c)
                    || isXML11 && !isXML11Character(c)) {
                throw new IOException("Invalid character");
            }
        }
        return s;
    }

    /**
     * Returns the given content value transformed to replace invalid
     * characters with entities (&lt; &gt; &amp; &quot; &apos;).
     *
     * @throws IOException if a character is not valid for the XML version
     */
    public static String contentToString(String s, boolean isXML11)
            throws IOException {
        StringBuffer result = new StringBuffer(s.length());
        int len = s.length();
        for (int i = 0; i < len; i++) {
            char c = s.charAt(i);
            if (!isXML11 && !isXMLCharacter(c)
                    || isXML11 && !isXML11Character(c)) {
                throw new IOException("Invalid character");
            }
            switch (c) {
            case '<':
                result.append("&lt;");
                break;
            case '>':
                result.append("&gt;");
                break;
            case '&':
                result.append("&amp;");
                break;
            case '"':
                result.append("&quot;");
                break;
            case '\'':
                result.append("&apos;");
                break;
            default:
                result.append(c);
            }
        }
        return result.toString();
    }

    /**
     * Finds and returns the index of child node in the given parent's children
     * array
     *
     * @param child
     *            The child node
     * @param parent
     *            The parent node
     * @return the index, or -1 if child is not a child of parent
     */
    public static int getChildIndex(Node child, Node parent) {
        if (child == null || child.getParentNode() != parent
                || child.getParentNode() == null) {
            return -1;
        }
        return getChildIndex(child);
    }

    /**
     * Finds and returns the index of child node in its parent's children array
     *
     * @param child
     *            The child node
     * @return the index in children array, or -1 if not found
     */
    public static int getChildIndex(Node child) {
        NodeList children = child.getParentNode().getChildNodes();
        for (int i = 0; i < children.getLength(); i++) {
            Node currentChild = children.item(i);
            if (currentChild == child) {
                return i;
            }
        }
        return -1;
    }

    /**
     * Checks if any of from the given list of nodes is an ancestor to another
     * node
     *
     * @param ancestorNodes
     *            The potential ancestor nodes
     * @param node
     *            The potential descendant node
     * @return True if at least one node is ancestor of the given node
     */
    public static boolean isAnyNodeAncestorOf(ArrayList ancestorNodes,
                                              Node node) {
        int n = ancestorNodes.size();
        for (Object ancestorNode : ancestorNodes) {
            Node ancestor = (Node) ancestorNode;
            if (isAncestorOf(ancestor, node)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Checks whether a node is ancestor of another node.
     *
     * @param node
     *            The potential ancestor node
     * @param descendant
     *            The potential descendant node
     * @return True if node is ancestor of the descendant node
     */
    public static boolean isAncestorOf(Node node, Node descendant) {
        if (node == null || descendant == null) {
            return false;
        }
        // Walk up from the descendant's parent to the document root.
        for (Node currentNode = descendant.getParentNode();
                currentNode != null;
                currentNode = currentNode.getParentNode()) {
            if (currentNode == node) {
                return true;
            }
        }
        return false;
    }

    /**
     * Tests whether the given node is a child of the given parent node.
     *
     * @param node
     *            The potential child node
     * @param parentNode
     *            Parent node
     * @return True if a node is a child of the given parent node
     */
    public static boolean isParentOf(Node node, Node parentNode) {
        if (node == null || parentNode == null
                || node.getParentNode() != parentNode) {
            return false;
        }
        return true;
    }

    /**
     * Checks if the node can be appended on the given parent node
     *
     * @param node
     *            The given node
     * @param parentNode
     *            The given parent node
     * @return True if the given node can be appended on the parent node
     */
    public static boolean canAppend(Node node, Node parentNode) {
        // Appending a node to itself or to one of its own descendants
        // would create a cycle.
        if (node == null || parentNode == null || node == parentNode
                || isAncestorOf(node, parentNode)) {
            return false;
        }
        return true;
    }

    /**
     * Checks whether any of the nodes from the list can be appended to a given
     * parentNode.
     *
     * @param children
     *            The given node list
     * @param parentNode
     *            The potential parent node
     * @return true if at least one node from a list can be appended
     */
    public static boolean canAppendAny(ArrayList children, Node parentNode) {
        if (!canHaveChildren(parentNode)) {
            return false;
        }
        int n = children.size();
        for (Object aChildren : children) {
            Node child = (Node) aChildren;
            if (canAppend(child, parentNode)) {
                return true;
            }
        }
        return false;
    }

    /**
     * Returns whether the given Node can have children.
     *
     * @param parentNode The Node to test
     * @return <code>true</code> if the node can have children,
     *         <code>false</code> otherwise
     */
    public static boolean canHaveChildren(Node parentNode) {
        if (parentNode == null) {
            return false;
        }
        switch (parentNode.getNodeType()) {
        case Node.DOCUMENT_NODE:
        case Node.TEXT_NODE:
        case Node.COMMENT_NODE:
        case Node.CDATA_SECTION_NODE:
        case Node.PROCESSING_INSTRUCTION_NODE:
            return false;
        default:
            return true;
        }
    }

    /**
     * Parses the given XML string into a DocumentFragment of the given document
     * or a new document if 'doc' is null.
     *
     * @param text
     *            The given XML string
     * @param doc
     *            The given document
     * @param uri
     *            The document URI
     * @param prefixes
     *            The prefixes map with (prefix, namespaceURI) pairs
     * @param wrapperElementName
     *            null: Ignore the wrapper element and prefixes map and try to
     *            parse the text as a whole document otherwise: Wrap the given
     *            text with the wrapper element with prefixes specified from the
     *            prefixes map
     * @param documentFactory
     *            What document factory to use when parsing the text
     * @return The document fragment or null on error.
     */
    public static Node parseXML(String text, Document doc, String uri,
                                Map prefixes, String wrapperElementName,
                                SAXDocumentFactory documentFactory) {
        // Create the wrapper element prefix and suffix, copying the (prefix,
        // namespaceURI) pairs from the prefixes map
        String wrapperElementPrefix = "";
        String wrapperElementSuffix = "";
        if (wrapperElementName != null) {
            wrapperElementPrefix = "<" + wrapperElementName;
            // Copy the prefixes from the prefixes map to the wrapper element
            if (prefixes != null) {
                wrapperElementPrefix += " ";
                for (Object o : prefixes.entrySet()) {
                    Map.Entry e = (Map.Entry) o;
                    String currentKey = (String) e.getKey();
                    String currentValue = (String) e.getValue();
                    wrapperElementPrefix += currentKey + "=\""
                            + currentValue + "\" ";
                }
            }
            wrapperElementPrefix += ">";
            wrapperElementSuffix += "</" + wrapperElementName + '>';
        }

        // Try and parse as a whole document, if no wrapper element is specified
        if (wrapperElementPrefix.trim().length() == 0
                && wrapperElementSuffix.trim().length() == 0) {
            try {
                Document d = documentFactory.createDocument
                    (uri, new StringReader(text));
                if (doc == null) {
                    return d;
                }
                Node result = doc.createDocumentFragment();
                result
                    .appendChild(doc.importNode(d.getDocumentElement(),
                                                true));
                return result;
            } catch (Exception ex) {
                // ignored: best-effort — fall through and retry below
                // as a wrapped fragment
            }
        }

        // Try and parse as a document fragment
        StringBuffer sb = new StringBuffer(wrapperElementPrefix.length()
                + text.length() + wrapperElementSuffix.length());
        sb.append(wrapperElementPrefix);
        sb.append(text);
        sb.append(wrapperElementSuffix);
        String newText = sb.toString();
        try {
            Document d = documentFactory.createDocument(uri,
                    new StringReader(newText));
            if (doc == null) {
                return d;
            }
            // Return a fragment wrapping the first element child of the
            // wrapper element (if any).
            for (Node node = d.getDocumentElement().getFirstChild();
                    node != null;
                    node = node.getNextSibling()) {
                if (node.getNodeType() == Node.ELEMENT_NODE) {
                    node = doc.importNode(node, true);
                    Node result = doc.createDocumentFragment();
                    result.appendChild(node);
                    return result;
                }
            }
        } catch (Exception exc) {
            // ignored: parse failure is reported as a null return by contract
        }
        return null;
    }

    /**
     * Deep clones a document using the given DOM implementation.
     * Attributes and children of the document element are imported onto
     * the new document element; sibling nodes of the root (comments, PIs)
     * are imported before/after it, and DOCTYPE nodes are dropped.
     */
    public static Document deepCloneDocument(Document doc,
                                             DOMImplementation impl) {
        Element root = doc.getDocumentElement();
        Document result = impl.createDocument(root.getNamespaceURI(),
                                              root.getNodeName(),
                                              null);
        Element rroot = result.getDocumentElement();
        boolean before = true;
        for (Node n = doc.getFirstChild();
                n != null;
                n = n.getNextSibling()) {
            if (n == root) {
                before = false;
                if (root.hasAttributes()) {
                    NamedNodeMap attr = root.getAttributes();
                    int len = attr.getLength();
                    for (int i = 0; i < len; i++) {
                        rroot.setAttributeNode
                            ((Attr)result.importNode(attr.item(i), true));
                    }
                }
                for (Node c = root.getFirstChild();
                        c != null;
                        c = c.getNextSibling()) {
                    rroot.appendChild(result.importNode(c, true));
                }
            } else {
                if (n.getNodeType() != Node.DOCUMENT_TYPE_NODE) {
                    if (before) {
                        result.insertBefore(result.importNode(n, true),
                                            rroot);
                    } else {
                        result.appendChild(result.importNode(n, true));
                    }
                }
            }
        }
        return result;
    }

    /**
     * Tests whether the given string is a valid name
*/ public static boolean isValidName(String s) { int len = s.length(); if (len == 0) { return false; } char c = s.charAt(0); int d = c / 32; int m = c % 32; if ((NAME_FIRST_CHARACTER[d] & (1 << m)) == 0) { return false; } for (int i = 1; i < len; i++) { c = s.charAt(i); d = c / 32; m = c % 32; if ((NAME_CHARACTER[d] & (1 << m)) == 0) { return false; } } return true; } /** * Tests whether the given string is a valid XML 1.1 name. */ public static boolean isValidName11(String s) { int len = s.length(); if (len == 0) { return false; } char c = s.charAt(0); int d = c / 32; int m = c % 32; if ((NAME11_FIRST_CHARACTER[d] & (1 << m)) == 0) { return false; } for (int i = 1; i < len; i++) { c = s.charAt(i); d = c / 32; m = c % 32; if ((NAME11_CHARACTER[d] & (1 << m)) == 0) { return false; } } return true; } /** * Tests whether the given string is a valid prefix. * This method assume that isValidName(s) is true. */ public static boolean isValidPrefix(String s) { return s.indexOf(':') == -1; } /** * Gets the prefix from the given qualified name. * This method assume that isValidName(s) is true. */ public static String getPrefix(String s) { int i = s.indexOf(':'); return (i == -1 || i == s.length()-1) ? null : s.substring(0, i); } /** * Gets the local name from the given qualified name. * This method assume that isValidName(s) is true. */ public static String getLocalName(String s) { int i = s.indexOf(':'); return (i == -1 || i == s.length()-1) ? s : s.substring(i + 1); } /** * Parses a 'xml-stylesheet' processing instruction data section and * puts the pseudo attributes in the given table. */ public static void parseStyleSheetPIData(String data, HashMap<String, String> table) { // !!! 
Internationalization char c; int i = 0; // Skip leading whitespaces while (i < data.length()) { c = data.charAt(i); if (!XMLUtilities.isXMLSpace(c)) { break; } i++; } while (i < data.length()) { // Parse the pseudo attribute name c = data.charAt(i); int d = c / 32; int m = c % 32; if ((NAME_FIRST_CHARACTER[d] & (1 << m)) == 0) { throw new DOMException(DOMException.INVALID_CHARACTER_ERR, "Wrong name initial: " + c); } StringBuffer ident = new StringBuffer(); ident.append(c); while (++i < data.length()) { c = data.charAt(i); d = c / 32; m = c % 32; if ((NAME_CHARACTER[d] & (1 << m)) == 0) { break; } ident.append(c); } if (i >= data.length()) { throw new DOMException(DOMException.SYNTAX_ERR, "Wrong xml-stylesheet data: " + data); } // Skip whitespaces while (i < data.length()) { c = data.charAt(i); if (!XMLUtilities.isXMLSpace(c)) { break; } i++; } if (i >= data.length()) { throw new DOMException(DOMException.SYNTAX_ERR, "Wrong xml-stylesheet data: " + data); } // The next char must be '=' if (data.charAt(i) != '=') { throw new DOMException(DOMException.SYNTAX_ERR, "Wrong xml-stylesheet data: " + data); } i++; // Skip whitespaces while (i < data.length()) { c = data.charAt(i); if (!XMLUtilities.isXMLSpace(c)) { break; } i++; } if (i >= data.length()) { throw new DOMException(DOMException.SYNTAX_ERR, "Wrong xml-stylesheet data: " + data); } // The next char must be '\'' or '"' c = data.charAt(i); i++; StringBuffer value = new StringBuffer(); if (c == '\'') { while (i < data.length()) { c = data.charAt(i); if (c == '\'') { break; } value.append(c); i++; } if (i >= data.length()) { throw new DOMException(DOMException.SYNTAX_ERR, "Wrong xml-stylesheet data: " + data); } } else if (c == '"') { while (i < data.length()) { c = data.charAt(i); if (c == '"') { break; } value.append(c); i++; } if (i >= data.length()) { throw new DOMException(DOMException.SYNTAX_ERR, "Wrong xml-stylesheet data: " + data); } } else { throw new DOMException(DOMException.SYNTAX_ERR, "Wrong 
xml-stylesheet data: " + data); } table.put(ident.toString().intern(), value.toString()); i++; // Skip whitespaces while (i < data.length()) { c = data.charAt(i); if (!XMLUtilities.isXMLSpace(c)) { break; } i++; } } } /** * String constants representing DOM modifier strings for various all * key lock combinations. */ protected static final String[] LOCK_STRINGS = { "", "CapsLock", "NumLock", "NumLock CapsLock", "Scroll", "Scroll CapsLock", "Scroll NumLock", "Scroll NumLock CapsLock", "KanaMode", "KanaMode CapsLock", "KanaMode NumLock", "KanaMode NumLock CapsLock", "KanaMode Scroll", "KanaMode Scroll CapsLock", "KanaMode Scroll NumLock", "KanaMode Scroll NumLock CapsLock" }; /** * String constants representing DOM modifier strings for various all * shift modifier combinations. */ protected static final String[] MODIFIER_STRINGS = { "", "Shift", "Control", "Control Shift", "Meta", "Meta Shift", "Control Meta", "Control Meta Shift", "Alt", "Alt Shift", "Alt Control", "Alt Control Shift", "Alt Meta", "Alt Meta Shift", "Alt Control Meta", "Alt Control Meta Shift", "AltGraph", "AltGraph Shift", "AltGraph Control", "AltGraph Control Shift", "AltGraph Meta", "AltGraph Meta Shift", "AltGraph Control Meta", "AltGraph Control Meta Shift", "Alt AltGraph", "Alt AltGraph Shift", "Alt AltGraph Control", "Alt AltGraph Control Shift", "Alt AltGraph Meta", "Alt AltGraph Meta Shift", "Alt AltGraph Control Meta", "Alt AltGraph Control Meta Shift" }; /** * Gets a DOM 3 modifiers string from the given lock and * shift bitmasks. 
*/ public static String getModifiersList(int lockState, int modifiersEx) { if ((modifiersEx & (1 << 13)) != 0) { modifiersEx = 0x10 | ((modifiersEx >> 6) & 0x0f); } else { modifiersEx = (modifiersEx >> 6) & 0x0f; } String s = LOCK_STRINGS[lockState & 0x0f]; if (s.length() != 0) { return s + ' ' + MODIFIER_STRINGS[modifiersEx]; } return MODIFIER_STRINGS[modifiersEx]; } /** * Returns whether the given element has a particular attribute and that * it exists due to being specified explicitly, rather than being defaulted * from a DTD. */ public static boolean isAttributeSpecifiedNS(Element e, String namespaceURI, String localName) { Attr a = e.getAttributeNodeNS(namespaceURI, localName); return a != null && a.getSpecified(); } }
googleapis/google-cloud-java
35,851
java-compute/proto-google-cloud-compute-v1/src/main/java/com/google/cloud/compute/v1/GetIamPolicyResourcePolicyRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/compute/v1/compute.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.compute.v1; /** * * * <pre> * A request message for ResourcePolicies.GetIamPolicy. See the method description for details. * </pre> * * Protobuf type {@code google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest} */ public final class GetIamPolicyResourcePolicyRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest) GetIamPolicyResourcePolicyRequestOrBuilder { private static final long serialVersionUID = 0L; // Use GetIamPolicyResourcePolicyRequest.newBuilder() to construct. 
private GetIamPolicyResourcePolicyRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private GetIamPolicyResourcePolicyRequest() { project_ = ""; region_ = ""; resource_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new GetIamPolicyResourcePolicyRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetIamPolicyResourcePolicyRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetIamPolicyResourcePolicyRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest.class, com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest.Builder.class); } private int bitField0_; public static final int OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER = 499220029; private int optionsRequestedPolicyVersion_ = 0; /** * * * <pre> * Requested IAM Policy version. * </pre> * * <code>optional int32 options_requested_policy_version = 499220029;</code> * * @return Whether the optionsRequestedPolicyVersion field is set. */ @java.lang.Override public boolean hasOptionsRequestedPolicyVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Requested IAM Policy version. * </pre> * * <code>optional int32 options_requested_policy_version = 499220029;</code> * * @return The optionsRequestedPolicyVersion. */ @java.lang.Override public int getOptionsRequestedPolicyVersion() { return optionsRequestedPolicyVersion_; } public static final int PROJECT_FIELD_NUMBER = 227560217; @SuppressWarnings("serial") private volatile java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. 
* </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The project. */ @java.lang.Override public java.lang.String getProject() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for project. */ @java.lang.Override public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int REGION_FIELD_NUMBER = 138946292; @SuppressWarnings("serial") private volatile java.lang.Object region_ = ""; /** * * * <pre> * The name of the region for this request. * </pre> * * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The region. */ @java.lang.Override public java.lang.String getRegion() { java.lang.Object ref = region_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); region_ = s; return s; } } /** * * * <pre> * The name of the region for this request. * </pre> * * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for region. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRegionBytes() { java.lang.Object ref = region_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); region_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RESOURCE_FIELD_NUMBER = 195806222; @SuppressWarnings("serial") private volatile java.lang.Object resource_ = ""; /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The resource. */ @java.lang.Override public java.lang.String getResource() { java.lang.Object ref = resource_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resource_ = s; return s; } } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for resource. 
*/ @java.lang.Override public com.google.protobuf.ByteString getResourceBytes() { java.lang.Object ref = resource_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 138946292, region_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 195806222, resource_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 227560217, project_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeInt32(499220029, optionsRequestedPolicyVersion_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(region_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(138946292, region_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(resource_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(195806222, resource_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(project_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(227560217, project_); } if (((bitField0_ & 0x00000001) != 0)) { size += 
com.google.protobuf.CodedOutputStream.computeInt32Size( 499220029, optionsRequestedPolicyVersion_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest)) { return super.equals(obj); } com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest other = (com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest) obj; if (hasOptionsRequestedPolicyVersion() != other.hasOptionsRequestedPolicyVersion()) return false; if (hasOptionsRequestedPolicyVersion()) { if (getOptionsRequestedPolicyVersion() != other.getOptionsRequestedPolicyVersion()) return false; } if (!getProject().equals(other.getProject())) return false; if (!getRegion().equals(other.getRegion())) return false; if (!getResource().equals(other.getResource())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasOptionsRequestedPolicyVersion()) { hash = (37 * hash) + OPTIONS_REQUESTED_POLICY_VERSION_FIELD_NUMBER; hash = (53 * hash) + getOptionsRequestedPolicyVersion(); } hash = (37 * hash) + PROJECT_FIELD_NUMBER; hash = (53 * hash) + getProject().hashCode(); hash = (37 * hash) + REGION_FIELD_NUMBER; hash = (53 * hash) + getRegion().hashCode(); hash = (37 * hash) + RESOURCE_FIELD_NUMBER; hash = (53 * hash) + getResource().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A request message for ResourcePolicies.GetIamPolicy. See the method description for details. 
* </pre> * * Protobuf type {@code google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest) com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetIamPolicyResourcePolicyRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetIamPolicyResourcePolicyRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest.class, com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest.Builder.class); } // Construct using com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; optionsRequestedPolicyVersion_ = 0; project_ = ""; region_ = ""; resource_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.compute.v1.Compute .internal_static_google_cloud_compute_v1_GetIamPolicyResourcePolicyRequest_descriptor; } @java.lang.Override public com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest getDefaultInstanceForType() { return com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest build() { 
com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest buildPartial() { com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest result = new com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.optionsRequestedPolicyVersion_ = optionsRequestedPolicyVersion_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.project_ = project_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.region_ = region_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.resource_ = resource_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public 
Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest) { return mergeFrom((com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest other) { if (other == com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest.getDefaultInstance()) return this; if (other.hasOptionsRequestedPolicyVersion()) { setOptionsRequestedPolicyVersion(other.getOptionsRequestedPolicyVersion()); } if (!other.getProject().isEmpty()) { project_ = other.project_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getRegion().isEmpty()) { region_ = other.region_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getResource().isEmpty()) { resource_ = other.resource_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 1111570338: { region_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 1111570338 case 1566449778: { resource_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 1566449778 case 1820481738: { project_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 1820481738 case -301207064: { optionsRequestedPolicyVersion_ = input.readInt32(); bitField0_ |= 0x00000001; break; } // case -301207064 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { 
done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int optionsRequestedPolicyVersion_; /** * * * <pre> * Requested IAM Policy version. * </pre> * * <code>optional int32 options_requested_policy_version = 499220029;</code> * * @return Whether the optionsRequestedPolicyVersion field is set. */ @java.lang.Override public boolean hasOptionsRequestedPolicyVersion() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Requested IAM Policy version. * </pre> * * <code>optional int32 options_requested_policy_version = 499220029;</code> * * @return The optionsRequestedPolicyVersion. */ @java.lang.Override public int getOptionsRequestedPolicyVersion() { return optionsRequestedPolicyVersion_; } /** * * * <pre> * Requested IAM Policy version. * </pre> * * <code>optional int32 options_requested_policy_version = 499220029;</code> * * @param value The optionsRequestedPolicyVersion to set. * @return This builder for chaining. */ public Builder setOptionsRequestedPolicyVersion(int value) { optionsRequestedPolicyVersion_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Requested IAM Policy version. * </pre> * * <code>optional int32 options_requested_policy_version = 499220029;</code> * * @return This builder for chaining. */ public Builder clearOptionsRequestedPolicyVersion() { bitField0_ = (bitField0_ & ~0x00000001); optionsRequestedPolicyVersion_ = 0; onChanged(); return this; } private java.lang.Object project_ = ""; /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The project. 
*/ public java.lang.String getProject() { java.lang.Object ref = project_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); project_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for project. */ public com.google.protobuf.ByteString getProjectBytes() { java.lang.Object ref = project_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); project_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The project to set. * @return This builder for chaining. */ public Builder setProject(java.lang.String value) { if (value == null) { throw new NullPointerException(); } project_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearProject() { project_ = getDefaultInstance().getProject(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Project ID for this request. * </pre> * * <code>string project = 227560217 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for project to set. * @return This builder for chaining. 
*/ public Builder setProjectBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); project_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object region_ = ""; /** * * * <pre> * The name of the region for this request. * </pre> * * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The region. */ public java.lang.String getRegion() { java.lang.Object ref = region_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); region_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name of the region for this request. * </pre> * * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for region. */ public com.google.protobuf.ByteString getRegionBytes() { java.lang.Object ref = region_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); region_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name of the region for this request. * </pre> * * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The region to set. * @return This builder for chaining. */ public Builder setRegion(java.lang.String value) { if (value == null) { throw new NullPointerException(); } region_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The name of the region for this request. * </pre> * * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. 
*/ public Builder clearRegion() { region_ = getDefaultInstance().getRegion(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The name of the region for this request. * </pre> * * <code>string region = 138946292 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for region to set. * @return This builder for chaining. */ public Builder setRegionBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); region_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object resource_ = ""; /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The resource. */ public java.lang.String getResource() { java.lang.Object ref = resource_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); resource_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for resource. */ public com.google.protobuf.ByteString getResourceBytes() { java.lang.Object ref = resource_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); resource_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The resource to set. * @return This builder for chaining. 
*/ public Builder setResource(java.lang.String value) { if (value == null) { throw new NullPointerException(); } resource_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearResource() { resource_ = getDefaultInstance().getResource(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Name or id of the resource for this request. * </pre> * * <code>string resource = 195806222 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for resource to set. * @return This builder for chaining. */ public Builder setResourceBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); resource_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest) } // @@protoc_insertion_point(class_scope:google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest) private static final com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest(); } public static com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<GetIamPolicyResourcePolicyRequest> PARSER = new 
com.google.protobuf.AbstractParser<GetIamPolicyResourcePolicyRequest>() { @java.lang.Override public GetIamPolicyResourcePolicyRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<GetIamPolicyResourcePolicyRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<GetIamPolicyResourcePolicyRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.compute.v1.GetIamPolicyResourcePolicyRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,878
java-dataplex/proto-google-cloud-dataplex-v1/src/main/java/com/google/cloud/dataplex/v1/UpdateDataScanRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataplex/v1/datascans.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataplex.v1; /** * * * <pre> * Update dataScan request. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.UpdateDataScanRequest} */ public final class UpdateDataScanRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataplex.v1.UpdateDataScanRequest) UpdateDataScanRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateDataScanRequest.newBuilder() to construct. 
private UpdateDataScanRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateDataScanRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateDataScanRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.DataScansProto .internal_static_google_cloud_dataplex_v1_UpdateDataScanRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.DataScansProto .internal_static_google_cloud_dataplex_v1_UpdateDataScanRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.UpdateDataScanRequest.class, com.google.cloud.dataplex.v1.UpdateDataScanRequest.Builder.class); } private int bitField0_; public static final int DATA_SCAN_FIELD_NUMBER = 1; private com.google.cloud.dataplex.v1.DataScan dataScan_; /** * * * <pre> * Required. DataScan resource to be updated. * * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.DataScan data_scan = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the dataScan field is set. */ @java.lang.Override public boolean hasDataScan() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. DataScan resource to be updated. * * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.DataScan data_scan = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The dataScan. */ @java.lang.Override public com.google.cloud.dataplex.v1.DataScan getDataScan() { return dataScan_ == null ? com.google.cloud.dataplex.v1.DataScan.getDefaultInstance() : dataScan_; } /** * * * <pre> * Required. DataScan resource to be updated. 
* * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.DataScan data_scan = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dataplex.v1.DataScanOrBuilder getDataScanOrBuilder() { return dataScan_ == null ? com.google.cloud.dataplex.v1.DataScan.getDefaultInstance() : dataScan_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } public static final int VALIDATE_ONLY_FIELD_NUMBER = 3; private boolean validateOnly_ = false; /** * * * <pre> * Optional. Only validate the request, but do not perform mutations. * The default is `false`. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. 
*/ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getDataScan()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } if (validateOnly_ != false) { output.writeBool(3, validateOnly_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getDataScan()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } if (validateOnly_ != false) { size += com.google.protobuf.CodedOutputStream.computeBoolSize(3, validateOnly_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataplex.v1.UpdateDataScanRequest)) { return super.equals(obj); } com.google.cloud.dataplex.v1.UpdateDataScanRequest other = (com.google.cloud.dataplex.v1.UpdateDataScanRequest) obj; if (hasDataScan() != other.hasDataScan()) return false; if (hasDataScan()) { if (!getDataScan().equals(other.getDataScan())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (getValidateOnly() != other.getValidateOnly()) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasDataScan()) { hash = (37 * hash) + DATA_SCAN_FIELD_NUMBER; hash = (53 * hash) + getDataScan().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (37 * hash) + VALIDATE_ONLY_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashBoolean(getValidateOnly()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dataplex.v1.UpdateDataScanRequest prototype) { return 
DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Update dataScan request. * </pre> * * Protobuf type {@code google.cloud.dataplex.v1.UpdateDataScanRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataplex.v1.UpdateDataScanRequest) com.google.cloud.dataplex.v1.UpdateDataScanRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataplex.v1.DataScansProto .internal_static_google_cloud_dataplex_v1_UpdateDataScanRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataplex.v1.DataScansProto .internal_static_google_cloud_dataplex_v1_UpdateDataScanRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataplex.v1.UpdateDataScanRequest.class, com.google.cloud.dataplex.v1.UpdateDataScanRequest.Builder.class); } // Construct using com.google.cloud.dataplex.v1.UpdateDataScanRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getDataScanFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; dataScan_ = null; if (dataScanBuilder_ != null) { dataScanBuilder_.dispose(); 
dataScanBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } validateOnly_ = false; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataplex.v1.DataScansProto .internal_static_google_cloud_dataplex_v1_UpdateDataScanRequest_descriptor; } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateDataScanRequest getDefaultInstanceForType() { return com.google.cloud.dataplex.v1.UpdateDataScanRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateDataScanRequest build() { com.google.cloud.dataplex.v1.UpdateDataScanRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateDataScanRequest buildPartial() { com.google.cloud.dataplex.v1.UpdateDataScanRequest result = new com.google.cloud.dataplex.v1.UpdateDataScanRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dataplex.v1.UpdateDataScanRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.dataScan_ = dataScanBuilder_ == null ? dataScan_ : dataScanBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.validateOnly_ = validateOnly_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataplex.v1.UpdateDataScanRequest) { return mergeFrom((com.google.cloud.dataplex.v1.UpdateDataScanRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataplex.v1.UpdateDataScanRequest other) { if (other == com.google.cloud.dataplex.v1.UpdateDataScanRequest.getDefaultInstance()) return this; if (other.hasDataScan()) { mergeDataScan(other.getDataScan()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } if (other.getValidateOnly() != false) { setValidateOnly(other.getValidateOnly()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getDataScanFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { validateOnly_ = input.readBool(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.dataplex.v1.DataScan dataScan_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataplex.v1.DataScan, com.google.cloud.dataplex.v1.DataScan.Builder, com.google.cloud.dataplex.v1.DataScanOrBuilder> dataScanBuilder_; /** * * * <pre> * Required. DataScan resource to be updated. * * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.DataScan data_scan = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the dataScan field is set. */ public boolean hasDataScan() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. DataScan resource to be updated. * * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.DataScan data_scan = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The dataScan. 
*/ public com.google.cloud.dataplex.v1.DataScan getDataScan() { if (dataScanBuilder_ == null) { return dataScan_ == null ? com.google.cloud.dataplex.v1.DataScan.getDefaultInstance() : dataScan_; } else { return dataScanBuilder_.getMessage(); } } /** * * * <pre> * Required. DataScan resource to be updated. * * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.DataScan data_scan = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDataScan(com.google.cloud.dataplex.v1.DataScan value) { if (dataScanBuilder_ == null) { if (value == null) { throw new NullPointerException(); } dataScan_ = value; } else { dataScanBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. DataScan resource to be updated. * * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.DataScan data_scan = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setDataScan(com.google.cloud.dataplex.v1.DataScan.Builder builderForValue) { if (dataScanBuilder_ == null) { dataScan_ = builderForValue.build(); } else { dataScanBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. DataScan resource to be updated. * * Only fields specified in `update_mask` are updated. 
* </pre> * * <code> * .google.cloud.dataplex.v1.DataScan data_scan = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeDataScan(com.google.cloud.dataplex.v1.DataScan value) { if (dataScanBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && dataScan_ != null && dataScan_ != com.google.cloud.dataplex.v1.DataScan.getDefaultInstance()) { getDataScanBuilder().mergeFrom(value); } else { dataScan_ = value; } } else { dataScanBuilder_.mergeFrom(value); } if (dataScan_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. DataScan resource to be updated. * * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.DataScan data_scan = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearDataScan() { bitField0_ = (bitField0_ & ~0x00000001); dataScan_ = null; if (dataScanBuilder_ != null) { dataScanBuilder_.dispose(); dataScanBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. DataScan resource to be updated. * * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.DataScan data_scan = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataplex.v1.DataScan.Builder getDataScanBuilder() { bitField0_ |= 0x00000001; onChanged(); return getDataScanFieldBuilder().getBuilder(); } /** * * * <pre> * Required. DataScan resource to be updated. * * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.DataScan data_scan = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dataplex.v1.DataScanOrBuilder getDataScanOrBuilder() { if (dataScanBuilder_ != null) { return dataScanBuilder_.getMessageOrBuilder(); } else { return dataScan_ == null ? com.google.cloud.dataplex.v1.DataScan.getDefaultInstance() : dataScan_; } } /** * * * <pre> * Required. 
DataScan resource to be updated. * * Only fields specified in `update_mask` are updated. * </pre> * * <code> * .google.cloud.dataplex.v1.DataScan data_scan = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataplex.v1.DataScan, com.google.cloud.dataplex.v1.DataScan.Builder, com.google.cloud.dataplex.v1.DataScanOrBuilder> getDataScanFieldBuilder() { if (dataScanBuilder_ == null) { dataScanBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dataplex.v1.DataScan, com.google.cloud.dataplex.v1.DataScan.Builder, com.google.cloud.dataplex.v1.DataScanOrBuilder>( getDataScan(), getParentForChildren(), isClean()); dataScan_ = null; } return dataScanBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. Mask of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. Mask of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. Mask of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } private boolean validateOnly_; /** * * * <pre> * Optional. Only validate the request, but do not perform mutations. * The default is `false`. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The validateOnly. */ @java.lang.Override public boolean getValidateOnly() { return validateOnly_; } /** * * * <pre> * Optional. Only validate the request, but do not perform mutations. * The default is `false`. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The validateOnly to set. 
* @return This builder for chaining. */ public Builder setValidateOnly(boolean value) { validateOnly_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. Only validate the request, but do not perform mutations. * The default is `false`. * </pre> * * <code>bool validate_only = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearValidateOnly() { bitField0_ = (bitField0_ & ~0x00000004); validateOnly_ = false; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataplex.v1.UpdateDataScanRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dataplex.v1.UpdateDataScanRequest) private static final com.google.cloud.dataplex.v1.UpdateDataScanRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataplex.v1.UpdateDataScanRequest(); } public static com.google.cloud.dataplex.v1.UpdateDataScanRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateDataScanRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateDataScanRequest>() { @java.lang.Override public UpdateDataScanRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateDataScanRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateDataScanRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataplex.v1.UpdateDataScanRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,926
java-shopping-css/proto-google-shopping-css-v1/src/main/java/com/google/shopping/css/v1/InsertCssProductInputRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/shopping/css/v1/css_product_inputs.proto // Protobuf Java Version: 3.25.8 package com.google.shopping.css.v1; /** * * * <pre> * Request message for the InsertCssProductInput method. * </pre> * * Protobuf type {@code google.shopping.css.v1.InsertCssProductInputRequest} */ public final class InsertCssProductInputRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.shopping.css.v1.InsertCssProductInputRequest) InsertCssProductInputRequestOrBuilder { private static final long serialVersionUID = 0L; // Use InsertCssProductInputRequest.newBuilder() to construct. 
private InsertCssProductInputRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private InsertCssProductInputRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new InsertCssProductInputRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.css.v1.CssProductInputsProto .internal_static_google_shopping_css_v1_InsertCssProductInputRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.css.v1.CssProductInputsProto .internal_static_google_shopping_css_v1_InsertCssProductInputRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.css.v1.InsertCssProductInputRequest.class, com.google.shopping.css.v1.InsertCssProductInputRequest.Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The account where this CSS Product will be inserted. * Format: accounts/{account} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The account where this CSS Product will be inserted. * Format: accounts/{account} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CSS_PRODUCT_INPUT_FIELD_NUMBER = 2; private com.google.shopping.css.v1.CssProductInput cssProductInput_; /** * * * <pre> * Required. The CSS Product Input to insert. * </pre> * * <code> * .google.shopping.css.v1.CssProductInput css_product_input = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the cssProductInput field is set. */ @java.lang.Override public boolean hasCssProductInput() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The CSS Product Input to insert. * </pre> * * <code> * .google.shopping.css.v1.CssProductInput css_product_input = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The cssProductInput. */ @java.lang.Override public com.google.shopping.css.v1.CssProductInput getCssProductInput() { return cssProductInput_ == null ? com.google.shopping.css.v1.CssProductInput.getDefaultInstance() : cssProductInput_; } /** * * * <pre> * Required. The CSS Product Input to insert. * </pre> * * <code> * .google.shopping.css.v1.CssProductInput css_product_input = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.shopping.css.v1.CssProductInputOrBuilder getCssProductInputOrBuilder() { return cssProductInput_ == null ? com.google.shopping.css.v1.CssProductInput.getDefaultInstance() : cssProductInput_; } public static final int FEED_ID_FIELD_NUMBER = 3; private long feedId_ = 0L; /** * * * <pre> * Optional. DEPRECATED. Feed id is not required for CSS Products. * The primary or supplemental feed id. 
If CSS Product already exists and * feed id provided is different, then the CSS Product will be moved to a * new feed. * Note: For now, CSSs do not need to provide feed ids as we create * feeds on the fly. * We do not have supplemental feed support for CSS Products yet. * </pre> * * <code>int64 feed_id = 3 [deprecated = true, (.google.api.field_behavior) = OPTIONAL];</code> * * @deprecated google.shopping.css.v1.InsertCssProductInputRequest.feed_id is deprecated. See * google/shopping/css/v1/css_product_inputs.proto;l=178 * @return The feedId. */ @java.lang.Override @java.lang.Deprecated public long getFeedId() { return feedId_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getCssProductInput()); } if (feedId_ != 0L) { output.writeInt64(3, feedId_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getCssProductInput()); } if (feedId_ != 0L) { size += com.google.protobuf.CodedOutputStream.computeInt64Size(3, feedId_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if 
(obj == this) { return true; } if (!(obj instanceof com.google.shopping.css.v1.InsertCssProductInputRequest)) { return super.equals(obj); } com.google.shopping.css.v1.InsertCssProductInputRequest other = (com.google.shopping.css.v1.InsertCssProductInputRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasCssProductInput() != other.hasCssProductInput()) return false; if (hasCssProductInput()) { if (!getCssProductInput().equals(other.getCssProductInput())) return false; } if (getFeedId() != other.getFeedId()) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasCssProductInput()) { hash = (37 * hash) + CSS_PRODUCT_INPUT_FIELD_NUMBER; hash = (53 * hash) + getCssProductInput().hashCode(); } hash = (37 * hash) + FEED_ID_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong(getFeedId()); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.shopping.css.v1.InsertCssProductInputRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.css.v1.InsertCssProductInputRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.css.v1.InsertCssProductInputRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.css.v1.InsertCssProductInputRequest 
parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.css.v1.InsertCssProductInputRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.shopping.css.v1.InsertCssProductInputRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.shopping.css.v1.InsertCssProductInputRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.shopping.css.v1.InsertCssProductInputRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.css.v1.InsertCssProductInputRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.shopping.css.v1.InsertCssProductInputRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.shopping.css.v1.InsertCssProductInputRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } 
public static com.google.shopping.css.v1.InsertCssProductInputRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.shopping.css.v1.InsertCssProductInputRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the InsertCssProductInput method. 
* </pre> * * Protobuf type {@code google.shopping.css.v1.InsertCssProductInputRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.shopping.css.v1.InsertCssProductInputRequest) com.google.shopping.css.v1.InsertCssProductInputRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.shopping.css.v1.CssProductInputsProto .internal_static_google_shopping_css_v1_InsertCssProductInputRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.shopping.css.v1.CssProductInputsProto .internal_static_google_shopping_css_v1_InsertCssProductInputRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.shopping.css.v1.InsertCssProductInputRequest.class, com.google.shopping.css.v1.InsertCssProductInputRequest.Builder.class); } // Construct using com.google.shopping.css.v1.InsertCssProductInputRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCssProductInputFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; cssProductInput_ = null; if (cssProductInputBuilder_ != null) { cssProductInputBuilder_.dispose(); cssProductInputBuilder_ = null; } feedId_ = 0L; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.shopping.css.v1.CssProductInputsProto .internal_static_google_shopping_css_v1_InsertCssProductInputRequest_descriptor; } @java.lang.Override public 
com.google.shopping.css.v1.InsertCssProductInputRequest getDefaultInstanceForType() { return com.google.shopping.css.v1.InsertCssProductInputRequest.getDefaultInstance(); } @java.lang.Override public com.google.shopping.css.v1.InsertCssProductInputRequest build() { com.google.shopping.css.v1.InsertCssProductInputRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.shopping.css.v1.InsertCssProductInputRequest buildPartial() { com.google.shopping.css.v1.InsertCssProductInputRequest result = new com.google.shopping.css.v1.InsertCssProductInputRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.shopping.css.v1.InsertCssProductInputRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.cssProductInput_ = cssProductInputBuilder_ == null ? 
cssProductInput_ : cssProductInputBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.feedId_ = feedId_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.shopping.css.v1.InsertCssProductInputRequest) { return mergeFrom((com.google.shopping.css.v1.InsertCssProductInputRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.shopping.css.v1.InsertCssProductInputRequest other) { if (other == com.google.shopping.css.v1.InsertCssProductInputRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasCssProductInput()) { mergeCssProductInput(other.getCssProductInput()); } if (other.getFeedId() != 0L) { setFeedId(other.getFeedId()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override 
public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getCssProductInputFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 24: { feedId_ = input.readInt64(); bitField0_ |= 0x00000004; break; } // case 24 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The account where this CSS Product will be inserted. * Format: accounts/{account} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The account where this CSS Product will be inserted. * Format: accounts/{account} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The account where this CSS Product will be inserted. * Format: accounts/{account} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The account where this CSS Product will be inserted. * Format: accounts/{account} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The account where this CSS Product will be inserted. * Format: accounts/{account} * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.shopping.css.v1.CssProductInput cssProductInput_; private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.css.v1.CssProductInput, com.google.shopping.css.v1.CssProductInput.Builder, com.google.shopping.css.v1.CssProductInputOrBuilder> cssProductInputBuilder_; /** * * * <pre> * Required. The CSS Product Input to insert. * </pre> * * <code> * .google.shopping.css.v1.CssProductInput css_product_input = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the cssProductInput field is set. */ public boolean hasCssProductInput() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The CSS Product Input to insert. * </pre> * * <code> * .google.shopping.css.v1.CssProductInput css_product_input = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The cssProductInput. */ public com.google.shopping.css.v1.CssProductInput getCssProductInput() { if (cssProductInputBuilder_ == null) { return cssProductInput_ == null ? com.google.shopping.css.v1.CssProductInput.getDefaultInstance() : cssProductInput_; } else { return cssProductInputBuilder_.getMessage(); } } /** * * * <pre> * Required. The CSS Product Input to insert. * </pre> * * <code> * .google.shopping.css.v1.CssProductInput css_product_input = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCssProductInput(com.google.shopping.css.v1.CssProductInput value) { if (cssProductInputBuilder_ == null) { if (value == null) { throw new NullPointerException(); } cssProductInput_ = value; } else { cssProductInputBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The CSS Product Input to insert. 
* </pre> * * <code> * .google.shopping.css.v1.CssProductInput css_product_input = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setCssProductInput( com.google.shopping.css.v1.CssProductInput.Builder builderForValue) { if (cssProductInputBuilder_ == null) { cssProductInput_ = builderForValue.build(); } else { cssProductInputBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The CSS Product Input to insert. * </pre> * * <code> * .google.shopping.css.v1.CssProductInput css_product_input = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeCssProductInput(com.google.shopping.css.v1.CssProductInput value) { if (cssProductInputBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && cssProductInput_ != null && cssProductInput_ != com.google.shopping.css.v1.CssProductInput.getDefaultInstance()) { getCssProductInputBuilder().mergeFrom(value); } else { cssProductInput_ = value; } } else { cssProductInputBuilder_.mergeFrom(value); } if (cssProductInput_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The CSS Product Input to insert. * </pre> * * <code> * .google.shopping.css.v1.CssProductInput css_product_input = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearCssProductInput() { bitField0_ = (bitField0_ & ~0x00000002); cssProductInput_ = null; if (cssProductInputBuilder_ != null) { cssProductInputBuilder_.dispose(); cssProductInputBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The CSS Product Input to insert. 
* </pre> * * <code> * .google.shopping.css.v1.CssProductInput css_product_input = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.shopping.css.v1.CssProductInput.Builder getCssProductInputBuilder() { bitField0_ |= 0x00000002; onChanged(); return getCssProductInputFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The CSS Product Input to insert. * </pre> * * <code> * .google.shopping.css.v1.CssProductInput css_product_input = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.shopping.css.v1.CssProductInputOrBuilder getCssProductInputOrBuilder() { if (cssProductInputBuilder_ != null) { return cssProductInputBuilder_.getMessageOrBuilder(); } else { return cssProductInput_ == null ? com.google.shopping.css.v1.CssProductInput.getDefaultInstance() : cssProductInput_; } } /** * * * <pre> * Required. The CSS Product Input to insert. * </pre> * * <code> * .google.shopping.css.v1.CssProductInput css_product_input = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.css.v1.CssProductInput, com.google.shopping.css.v1.CssProductInput.Builder, com.google.shopping.css.v1.CssProductInputOrBuilder> getCssProductInputFieldBuilder() { if (cssProductInputBuilder_ == null) { cssProductInputBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.shopping.css.v1.CssProductInput, com.google.shopping.css.v1.CssProductInput.Builder, com.google.shopping.css.v1.CssProductInputOrBuilder>( getCssProductInput(), getParentForChildren(), isClean()); cssProductInput_ = null; } return cssProductInputBuilder_; } private long feedId_; /** * * * <pre> * Optional. DEPRECATED. Feed id is not required for CSS Products. * The primary or supplemental feed id. If CSS Product already exists and * feed id provided is different, then the CSS Product will be moved to a * new feed. 
* Note: For now, CSSs do not need to provide feed ids as we create * feeds on the fly. * We do not have supplemental feed support for CSS Products yet. * </pre> * * <code>int64 feed_id = 3 [deprecated = true, (.google.api.field_behavior) = OPTIONAL];</code> * * @deprecated google.shopping.css.v1.InsertCssProductInputRequest.feed_id is deprecated. See * google/shopping/css/v1/css_product_inputs.proto;l=178 * @return The feedId. */ @java.lang.Override @java.lang.Deprecated public long getFeedId() { return feedId_; } /** * * * <pre> * Optional. DEPRECATED. Feed id is not required for CSS Products. * The primary or supplemental feed id. If CSS Product already exists and * feed id provided is different, then the CSS Product will be moved to a * new feed. * Note: For now, CSSs do not need to provide feed ids as we create * feeds on the fly. * We do not have supplemental feed support for CSS Products yet. * </pre> * * <code>int64 feed_id = 3 [deprecated = true, (.google.api.field_behavior) = OPTIONAL];</code> * * @deprecated google.shopping.css.v1.InsertCssProductInputRequest.feed_id is deprecated. See * google/shopping/css/v1/css_product_inputs.proto;l=178 * @param value The feedId to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setFeedId(long value) { feedId_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. DEPRECATED. Feed id is not required for CSS Products. * The primary or supplemental feed id. If CSS Product already exists and * feed id provided is different, then the CSS Product will be moved to a * new feed. * Note: For now, CSSs do not need to provide feed ids as we create * feeds on the fly. * We do not have supplemental feed support for CSS Products yet. * </pre> * * <code>int64 feed_id = 3 [deprecated = true, (.google.api.field_behavior) = OPTIONAL];</code> * * @deprecated google.shopping.css.v1.InsertCssProductInputRequest.feed_id is deprecated. 
See * google/shopping/css/v1/css_product_inputs.proto;l=178 * @return This builder for chaining. */ @java.lang.Deprecated public Builder clearFeedId() { bitField0_ = (bitField0_ & ~0x00000004); feedId_ = 0L; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.shopping.css.v1.InsertCssProductInputRequest) } // @@protoc_insertion_point(class_scope:google.shopping.css.v1.InsertCssProductInputRequest) private static final com.google.shopping.css.v1.InsertCssProductInputRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.shopping.css.v1.InsertCssProductInputRequest(); } public static com.google.shopping.css.v1.InsertCssProductInputRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<InsertCssProductInputRequest> PARSER = new com.google.protobuf.AbstractParser<InsertCssProductInputRequest>() { @java.lang.Override public InsertCssProductInputRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return 
builder.buildPartial(); } }; public static com.google.protobuf.Parser<InsertCssProductInputRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<InsertCssProductInputRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.shopping.css.v1.InsertCssProductInputRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/ctakes
33,574
ctakes-temporal/src/main/java/org/apache/ctakes/temporal/nn/eval/EvaluationOfNeuralJointRelations.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.ctakes.temporal.nn.eval; import com.google.common.base.Function; import com.google.common.collect.*; import com.lexicalscope.jewel.cli.CliFactory; import com.lexicalscope.jewel.cli.Option; import org.apache.ctakes.relationextractor.eval.RelationExtractorEvaluation.HashableArguments; import org.apache.ctakes.temporal.eval.EvaluationOfEventTimeRelations.ParameterSettings; import org.apache.ctakes.temporal.eval.EvaluationOfTemporalRelations_ImplBase; import org.apache.ctakes.temporal.eval.Evaluation_ImplBase; import org.apache.ctakes.temporal.eval.I2B2Data; import org.apache.ctakes.temporal.eval.THYMEData; import org.apache.ctakes.temporal.keras.KerasStringOutcomeDataWriter; import org.apache.ctakes.temporal.keras.ScriptStringFeatureDataWriter; import org.apache.ctakes.temporal.nn.ae.WindowBasedAnnotator; import org.apache.ctakes.temporal.utils.AnnotationIdCollection; import org.apache.ctakes.temporal.utils.TLinkTypeArray2; import org.apache.ctakes.typesystem.type.relation.BinaryTextRelation; import org.apache.ctakes.typesystem.type.relation.RelationArgument; import org.apache.ctakes.typesystem.type.relation.TemporalTextRelation; import 
org.apache.ctakes.typesystem.type.textsem.EventMention; import org.apache.ctakes.typesystem.type.textsem.IdentifiedAnnotation; import org.apache.ctakes.typesystem.type.textsem.TimeMention; import org.apache.ctakes.typesystem.type.textspan.Sentence; import org.apache.uima.analysis_engine.AnalysisEngineDescription; import org.apache.uima.analysis_engine.AnalysisEngineProcessException; import org.apache.uima.cas.CAS; import org.apache.uima.cas.CASException; import org.apache.uima.collection.CollectionReader; import org.apache.uima.fit.component.JCasAnnotator_ImplBase; import org.apache.uima.fit.descriptor.ConfigurationParameter; import org.apache.uima.fit.factory.AggregateBuilder; import org.apache.uima.fit.factory.AnalysisEngineFactory; import org.apache.uima.fit.pipeline.JCasIterator; import org.apache.uima.fit.pipeline.SimplePipeline; import org.apache.uima.fit.util.JCasUtil; import org.apache.uima.jcas.JCas; import org.apache.uima.jcas.tcas.Annotation; import org.apache.uima.resource.ResourceInitializationException; import org.apache.uima.util.FileUtils; import org.cleartk.eval.AnnotationStatistics; import org.cleartk.ml.CleartkAnnotator; import org.cleartk.ml.jar.DefaultDataWriterFactory; import org.cleartk.ml.jar.DirectoryDataWriterFactory; import org.cleartk.ml.jar.GenericJarClassifierFactory; import org.cleartk.ml.jar.JarClassifierBuilder; import org.cleartk.ml.tksvmlight.model.CompositeKernel.ComboOperator; import org.cleartk.util.ViewUriUtil; import java.io.BufferedWriter; import java.io.File; import java.io.FileWriter; import java.io.PrintWriter; import java.net.URI; import java.util.*; /** * This is the evaluation code to evaluate a joint neural model for predicting temporal relations, including event-time and event-event relations. 
* @author chenlin * */ public class EvaluationOfNeuralJointRelations extends EvaluationOfTemporalRelations_ImplBase{ interface TempRelOptions extends Evaluation_ImplBase.Options{ @Option boolean getPrintFormattedRelations(); @Option boolean getBaseline(); @Option boolean getClosure(); @Option boolean getUseTmp(); @Option boolean getUseGoldAttributes(); @Option boolean getSkipTrain(); @Option boolean getWriteProbabilities(); @Option boolean getTestOnTrain(); @Option boolean getSkipWrite(); } // protected static boolean DEFAULT_BOTH_DIRECTIONS = false; // protected static float DEFAULT_DOWNSAMPLE = 1.0f; // private static double DEFAULT_SVM_C = 1.0; // private static double DEFAULT_SVM_G = 1.0; // private static double DEFAULT_TK = 0.5; // private static double DEFAULT_LAMBDA = 0.5; // defaultParams = new ParameterSettings(DEFAULT_BOTH_DIRECTIONS, DEFAULT_DOWNSAMPLE, "tk", // DEFAULT_SVM_C, DEFAULT_SVM_G, "polynomial", ComboOperator.SUM, DEFAULT_TK, DEFAULT_LAMBDA); protected static ParameterSettings flatParams = new ParameterSettings(DEFAULT_BOTH_DIRECTIONS, DEFAULT_DOWNSAMPLE, "linear", 10.0, 1.0, "linear", ComboOperator.VECTOR_ONLY, DEFAULT_TK, DEFAULT_LAMBDA); protected static ParameterSettings allBagsParams = new ParameterSettings(DEFAULT_BOTH_DIRECTIONS, DEFAULT_DOWNSAMPLE, "tk", 100.0, 0.1, "radial basis function", ComboOperator.SUM, 0.5, 0.5); protected static ParameterSettings allParams = new ParameterSettings(DEFAULT_BOTH_DIRECTIONS, DEFAULT_DOWNSAMPLE, "tk", 10.0, 1.0, "polynomial", ComboOperator.SUM, 0.1, 0.5); // (0.3, 0.4 for tklibsvm) protected static ParameterSettings ftParams = new ParameterSettings(DEFAULT_BOTH_DIRECTIONS, DEFAULT_DOWNSAMPLE, "tk", 1.0, 0.1, "radial basis function", ComboOperator.SUM, 0.5, 0.5); private static Boolean recallModeEvaluation = true; public static void main(String[] args) throws Exception { TempRelOptions options = CliFactory.parseArguments(TempRelOptions.class, args); List<Integer> trainItems = null; List<Integer> 
devItems = null; List<Integer> testItems = null; List<Integer> patientSets = options.getPatients().getList(); if(options.getXMLFormat() == XMLFormat.I2B2){ trainItems = I2B2Data.getTrainPatientSets(options.getXMLDirectory()); devItems = I2B2Data.getDevPatientSets(options.getXMLDirectory()); testItems = I2B2Data.getTestPatientSets(options.getXMLDirectory()); }else{ trainItems = THYMEData.getPatientSets(patientSets, options.getTrainRemainders().getList()); devItems = THYMEData.getPatientSets(patientSets, options.getDevRemainders().getList()); testItems = THYMEData.getPatientSets(patientSets, options.getTestRemainders().getList()); } ParameterSettings params = allParams; // possibleParams.add(defaultParams); // for(ParameterSettings params : possibleParams){ try{ File workingDir = new File("target/eval/thyme/");///Volumes/chip-nlp/Public/THYME/eval/thyme/");///Users/chenlin/Projects/THYME/modelFile"/Users/chenlin/Projects/deepLearning/models/selfTrainModel");//"target/eval/thyme/");//"/Volumes/chip-nlp/Public/THYME/eval/thyme/"); if(!workingDir.exists()) workingDir.mkdirs(); if(options.getUseTmp()){ File tempModelDir = File.createTempFile("temporal", null, workingDir); tempModelDir.delete(); tempModelDir.mkdir(); workingDir = tempModelDir; } EvaluationOfNeuralJointRelations evaluation = new EvaluationOfNeuralJointRelations( workingDir, options.getRawTextDirectory(), options.getXMLDirectory(), options.getXMLFormat(), options.getSubcorpus(), options.getXMIDirectory(), options.getTreebankDirectory(), options.getClosure(), options.getPrintErrors(), options.getPrintFormattedRelations(), options.getBaseline(), options.getUseGoldAttributes(), options.getKernelParams(), params); // evaluation.prepareXMIsFor(patientSets); if(options.getI2B2Output()!=null) evaluation.setI2B2Output(options.getI2B2Output() + "/train_and_test/joint");//"/temporal-relations/joint"); if(options.getAnaforaOutput()!=null) evaluation.anaforaOutput = options.getAnaforaOutput(); List<Integer> training = 
trainItems; //change train, dev, test here List<Integer> testing = null; if(options.getTest()){ training.addAll(devItems); testing = testItems; }else{ testing = devItems; } //do closure on system, but not on gold, to calculate recall evaluation.skipTrain = options.getSkipTrain(); evaluation.skipWrite = options.getSkipWrite(); if(evaluation.skipTrain && options.getTest()){ evaluation.prepareXMIsFor(testing); }else{ evaluation.prepareXMIsFor(patientSets); } //sort list: Collections.sort(training); Collections.sort(testing); //test or train or test evaluation.testOnTrain = options.getTestOnTrain(); if(evaluation.testOnTrain){ params.stats = evaluation.trainAndTest(training, training); }else{//test on testing set params.stats = evaluation.trainAndTest(training, testing);//training } System.err.println(params.stats); if(options.getUseTmp()){ // won't work because it's not empty. should we be concerned with this or is it responsibility of // person invoking the tmp flag? FileUtils.deleteRecursive(workingDir); } }catch(ResourceInitializationException e){ System.err.println("Error with parameter settings: " + params); e.printStackTrace(); } } // private ParameterSettings params; protected boolean useClosure; protected boolean useGoldAttributes; protected boolean skipTrain=false; public boolean skipWrite = false; // protected boolean printRelations = false; protected boolean testOnTrain=false; public EvaluationOfNeuralJointRelations( File baseDirectory, File rawTextDirectory, File xmlDirectory, XMLFormat xmlFormat, Subcorpus subcorpus, File xmiDirectory, File treebankDirectory, boolean useClosure, boolean printErrors, boolean printRelations, boolean baseline, boolean useGoldAttributes, String kernelParams, ParameterSettings params){ super( baseDirectory, rawTextDirectory, xmlDirectory, xmlFormat, subcorpus, xmiDirectory, treebankDirectory, printErrors, printRelations, params); this.params = params; this.useClosure = useClosure; this.printErrors = printErrors; 
this.printRelations = printRelations; this.useGoldAttributes = useGoldAttributes; this.kernelParams = kernelParams == null ? null : kernelParams.split(" "); } // public EvaluationOfTemporalRelations(File baseDirectory, File rawTextDirectory, // File knowtatorXMLDirectory, File xmiDirectory) { // // super(baseDirectory, rawTextDirectory, knowtatorXMLDirectory, xmiDirectory, null); // this.params = defaultParams; // this.printErrors = false; // } @Override protected void train(CollectionReader collectionReader, File directory) throws Exception { // if(this.baseline) return; if(this.skipTrain) return; if(!this.skipWrite){ AggregateBuilder aggregateBuilder = this.getPreprocessorAggregateBuilder(); aggregateBuilder.add(CopyFromGold.getDescription(EventMention.class, TimeMention.class, BinaryTextRelation.class)); // aggregateBuilder.add(AnalysisEngineFactory.createEngineDescription(RemoveCrossSentenceRelations.class)); if(!this.useGoldAttributes){ aggregateBuilder.add(AnalysisEngineFactory.createEngineDescription(RemoveGoldAttributes.class)); } if (this.useClosure) { aggregateBuilder.add(AnalysisEngineFactory.createEngineDescription(AddClosure.class)); } aggregateBuilder.add(AnalysisEngineFactory.createEngineDescription(RemoveNonContainsRelations.class)); aggregateBuilder.add( AnalysisEngineFactory.createEngineDescription(WindowBasedAnnotator.class,//WindowBasedAnnotator.class,//EventTimeTokenAndPathBasedAnnotator.class,// CleartkAnnotator.PARAM_IS_TRAINING, true, DefaultDataWriterFactory.PARAM_DATA_WRITER_CLASS_NAME, KerasStringOutcomeDataWriter.class, DirectoryDataWriterFactory.PARAM_OUTPUT_DIRECTORY, new File(directory,"joint"), ScriptStringFeatureDataWriter.PARAM_SCRIPT_DIR, "scripts/nn-joint/" ) ); SimplePipeline.runPipeline(collectionReader, aggregateBuilder.createAggregate()); } // HideOutput hider = new HideOutput(); JarClassifierBuilder.trainAndPackage(new File(directory,"joint")); } @Override protected AnnotationStatistics<String> test(CollectionReader 
collectionReader, File directory) throws Exception { this.useClosure=false; AggregateBuilder aggregateBuilder = this.getPreprocessorAggregateBuilder(); aggregateBuilder.add(CopyFromGold.getDescription(EventMention.class, TimeMention.class)); aggregateBuilder.add(CopyFromSystem.getDescription(Sentence.class)); aggregateBuilder.add(AnalysisEngineFactory.createEngineDescription(RemoveNonContainsRelations.class), CAS.NAME_DEFAULT_SOFA, GOLD_VIEW_NAME); aggregateBuilder.add(AnalysisEngineFactory.createEngineDescription(RemoveRelations.class)); AnalysisEngineDescription aed = null; aed = AnalysisEngineFactory.createEngineDescription(WindowBasedAnnotator.class,//WindowBasedAnnotator.class, CleartkAnnotator.PARAM_IS_TRAINING, false, GenericJarClassifierFactory.PARAM_CLASSIFIER_JAR_PATH, new File(new File(directory,"joint"), "model.jar").getPath()); aggregateBuilder.add(aed); //closure for system: // aggregateBuilder.add( // AnalysisEngineFactory.createEngineDescription(AddClosure.class)//AnalysisEngineFactory.createPrimitiveDescription(AddTransitiveContainsRelations.class), // ); // aed = DocTimeRelAnnotator.createAnnotatorDescription(new File("target/eval/event-properties/train_and_test/docTimeRel/model.jar").getAbsolutePath()); // aggregateBuilder.add(aed); // aggregateBuilder.add(AnalysisEngineFactory.createEngineDescription(CrossSentenceTemporalRelationAnnotator.class)); // aggregateBuilder.add(AnalysisEngineFactory.createEngineDescription(WithinSentenceBeforeRelationAnnotator.class)); if(this.anaforaOutput != null){ aed = AnalysisEngineFactory.createEngineDescription(WriteAnaforaXML.class, WriteAnaforaXML.PARAM_OUTPUT_DIR, this.anaforaOutput); aggregateBuilder.add(aed, "TimexView", CAS.NAME_DEFAULT_SOFA); } File outf = null; if (recallModeEvaluation && this.useClosure) {//add closure for system output aggregateBuilder.add( 
AnalysisEngineFactory.createEngineDescription(AddClosure.class),//AnalysisEngineFactory.createPrimitiveDescription(AddTransitiveContainsRelations.class), GOLD_VIEW_NAME, CAS.NAME_DEFAULT_SOFA ); outf = new File("target/brain_biLstm_recall_dev.txt"); }else if (!recallModeEvaluation && this.useClosure){ outf = new File("target/brain_biLstm_precision_dev.txt"); }else{ outf = new File("target/colon_ernie2filtered_contains_colon_test_closure.txt"); } PrintWriter outDrop =null; outDrop = new PrintWriter(new BufferedWriter(new FileWriter(outf, false))); Function<BinaryTextRelation, ?> getSpan = new Function<BinaryTextRelation, HashableArguments>() { public HashableArguments apply(BinaryTextRelation relation) { return new HashableArguments(relation); } }; Function<BinaryTextRelation, String> getOutcome = AnnotationStatistics.annotationToFeatureValue("category"); int withinSentRelations = 0; int crossSentRelations = 0; int withinSentCorrect = 0; int crossSentCorrect = 0; int withinSentGolds = 0; int crossSentGolds = 0; AnnotationStatistics<String> stats = new AnnotationStatistics<>(); JCasIterator jcasIter =new JCasIterator(collectionReader, aggregateBuilder.createAggregate()); JCas jCas = null; while(jcasIter.hasNext()) { jCas = jcasIter.next(); JCas goldView = jCas.getView(GOLD_VIEW_NAME); JCas systemView = jCas.getView(CAS.NAME_DEFAULT_SOFA); Map<Annotation, List<Sentence>> sentCoveringMap = JCasUtil.indexCovering(systemView, Annotation.class, Sentence.class); Map<Annotation, List<Sentence>> goldSentCoveringMap = JCasUtil.indexCovering(goldView, Annotation.class, Sentence.class); Collection<BinaryTextRelation> goldRelations = JCasUtil.select( goldView, BinaryTextRelation.class); Collection<BinaryTextRelation> systemRelations = JCasUtil.select( systemView, BinaryTextRelation.class); stats.add(goldRelations, systemRelations, getSpan, getOutcome); if(this.printRelations){ URI uri = ViewUriUtil.getURI(jCas); String[] path = uri.getPath().split("/"); 
printRelationAnnotations(path[path.length - 1], systemRelations); } if(this.printErrors){ Map<HashableArguments, BinaryTextRelation> goldMap = Maps.newHashMap(); for (BinaryTextRelation relation : goldRelations) { goldMap.put(new HashableArguments(relation), relation); } Map<HashableArguments, BinaryTextRelation> systemMap = Maps.newHashMap(); for (BinaryTextRelation relation : systemRelations) { systemMap.put(new HashableArguments(relation), relation); } Set<HashableArguments> all = Sets.union(goldMap.keySet(), systemMap.keySet()); List<HashableArguments> sorted = Lists.newArrayList(all); Collections.sort(sorted); if(jCas != null){ outDrop.println("Doc id: " + ViewUriUtil.getURI(jCas).toString()); for (HashableArguments key : sorted) { BinaryTextRelation goldRelation = goldMap.get(key); BinaryTextRelation systemRelation = systemMap.get(key); if (goldRelation == null) { //outDrop.println("System added: " + formatRelation(systemRelation)); if(checkArgumentsInTheSameSent(systemRelation, sentCoveringMap)){ withinSentRelations+=1; outDrop.println("System added within-sent: " + formatRelation(systemRelation)); }else{ crossSentRelations+=1; outDrop.println("System added cross-sent: " + formatRelation(systemRelation)); } } else if (systemRelation == null) { //outDrop.println("System dropped: " + formatRelation(goldRelation)); if(checkArgumentsInTheSameSent(goldRelation, goldSentCoveringMap)){ withinSentGolds+=1; outDrop.println("System dropped within-sent: " + formatRelation(goldRelation)); }else{ crossSentGolds+=1; outDrop.println("System dropped cross-sent: " + formatRelation(goldRelation)); } } else if (!systemRelation.getCategory().equals(goldRelation.getCategory())) { String label = systemRelation.getCategory(); //outDrop.printf("System labeled %s for %s\n", label, formatRelation(goldRelation)); if(checkArgumentsInTheSameSent(systemRelation, sentCoveringMap)){ withinSentRelations+=1; outDrop.printf("System labeled within-sent %s for %s\n", label, 
formatRelation(goldRelation)); }else{ crossSentRelations+=1; outDrop.printf("System labeled cross-sent %s for %s\n", label, formatRelation(goldRelation)); } if(checkArgumentsInTheSameSent(goldRelation, goldSentCoveringMap)){ withinSentGolds+=1; }else{ crossSentGolds+=1; } } else{ //outDrop.println("Nailed it! " + formatRelation(systemRelation)); if(checkArgumentsInTheSameSent(systemRelation, sentCoveringMap)){ withinSentRelations+=1; withinSentCorrect +=1; outDrop.println("Nailed it within-sent! " + formatRelation(systemRelation)); }else{ crossSentRelations+=1; crossSentCorrect +=1; outDrop.println("Nailed it cross-sent! " + formatRelation(systemRelation)); } if(checkArgumentsInTheSameSent(goldRelation, goldSentCoveringMap)){ withinSentGolds+=1; }else{ crossSentGolds+=1; } } } } } } System.out.print("There are "+ withinSentRelations + " within Sentence Predictions; " + withinSentCorrect+ " are correct predictions\n"); System.out.print("There are "+ crossSentRelations + " cross Sentence Predictions; " + crossSentCorrect+ " are correct predictions\n"); System.out.print("There are "+ crossSentGolds + " cross Sentence Gold Relations; " + withinSentGolds+ " are within-sent gold relations\n"); outDrop.close(); return stats; } private static boolean checkArgumentsInTheSameSent(BinaryTextRelation systemRelation, Map<Annotation, List<Sentence>> sentCoveringMap) { Annotation arg1 = systemRelation.getArg1().getArgument(); Annotation arg2 = systemRelation.getArg2().getArgument(); Collection<Sentence> sent1List = sentCoveringMap.get(arg1); Collection<Sentence> sent2List = sentCoveringMap.get(arg2); for(Sentence sent1 : sent1List){ for(Sentence sent2 : sent2List){ if(sent1==sent2){ return true; } } } return false; } public static class RemoveNonUMLSEvents extends org.apache.uima.fit.component.JCasAnnotator_ImplBase { public static final String PARAM_GOLD_VIEW = "GoldView"; @ConfigurationParameter(name = PARAM_GOLD_VIEW,mandatory=false) private String goldViewName = 
CAS.NAME_DEFAULT_SOFA; @Override public void process(JCas jCas) throws AnalysisEngineProcessException { JCas sysView; JCas goldView; try { sysView = jCas.getView(CAS.NAME_DEFAULT_SOFA); goldView = jCas.getView(PARAM_GOLD_VIEW); } catch (CASException e) { throw new AnalysisEngineProcessException(e); } for(TemporalTextRelation relation : Lists.newArrayList(JCasUtil.select(goldView, TemporalTextRelation.class))){ Annotation arg1 = relation.getArg1().getArgument(); Annotation arg2 = relation.getArg2().getArgument(); boolean arg1Valid = false; boolean arg2Valid = false; for (EventMention event : JCasUtil.selectCovered(sysView, EventMention.class, arg1)){ if(!event.getClass().equals(EventMention.class)){ arg1Valid = true; break; } } for (EventMention event : JCasUtil.selectCovered(sysView, EventMention.class, arg2)){ if(!event.getClass().equals(EventMention.class)){ arg2Valid = true; break; } } if(arg1Valid && arg2Valid){ // these are the kind we keep. continue; } arg1.removeFromIndexes(); arg2.removeFromIndexes(); relation.removeFromIndexes(); } } } static void createRelation(JCas jCas, Annotation arg1, Annotation arg2, String category) { RelationArgument relArg1 = new RelationArgument(jCas); relArg1.setArgument(arg1); relArg1.setRole("Arg1"); relArg1.addToIndexes(); RelationArgument relArg2 = new RelationArgument(jCas); relArg2.setArgument(arg2); relArg2.setRole("Arg2"); relArg2.addToIndexes(); TemporalTextRelation relation = new TemporalTextRelation(jCas); relation.setArg1(relArg1); relation.setArg2(relArg2); relation.setCategory(category); relation.addToIndexes(); } public static class RemoveNonTLINKRelations extends JCasAnnotator_ImplBase { @Override public void process(JCas jCas) throws AnalysisEngineProcessException { for (BinaryTextRelation relation : Lists.newArrayList(JCasUtil.select( jCas, BinaryTextRelation.class))) { if (!(relation instanceof TemporalTextRelation)) { relation.getArg1().removeFromIndexes(); relation.getArg2().removeFromIndexes(); 
relation.removeFromIndexes(); } } } } public static class RemoveCrossSentenceRelations extends org.apache.uima.fit.component.JCasAnnotator_ImplBase { public static final String PARAM_SENTENCE_VIEW = "SentenceView"; @ConfigurationParameter(name = PARAM_SENTENCE_VIEW,mandatory=false) private String sentenceViewName = CAS.NAME_DEFAULT_SOFA; public static final String PARAM_RELATION_VIEW = "RelationView"; @ConfigurationParameter(name = PARAM_RELATION_VIEW,mandatory=false) private String relationViewName = CAS.NAME_DEFAULT_SOFA; @Override public void process(JCas jCas) throws AnalysisEngineProcessException { JCas sentenceView, relationView; try { sentenceView = jCas.getView(this.sentenceViewName); relationView = jCas.getView(this.relationViewName); } catch (CASException e) { throw new AnalysisEngineProcessException(e); } // map events and times to the sentences that contain them Map<IdentifiedAnnotation, Integer> sentenceIndex = Maps.newHashMap(); int index = -1; for (Sentence sentence : JCasUtil.select(sentenceView, Sentence.class)) { ++index; for (EventMention event : JCasUtil.selectCovered(relationView, EventMention.class, sentence)) { sentenceIndex.put(event, index); } for (TimeMention time : JCasUtil.selectCovered(relationView, TimeMention.class, sentence)) { sentenceIndex.put(time, index); } } // remove any relations that are in different sentences. 
for (BinaryTextRelation relation : Lists.newArrayList(JCasUtil.select( relationView, BinaryTextRelation.class))) { Integer sent1 = sentenceIndex.get(relation.getArg1().getArgument()); Integer sent2 = sentenceIndex.get(relation.getArg2().getArgument()); if (sent1 == null || sent2 == null || !sent1.equals(sent2)) { relation.getArg1().removeFromIndexes(); relation.getArg2().removeFromIndexes(); relation.removeFromIndexes(); } } } } public static class RemoveRelations extends org.apache.uima.fit.component.JCasAnnotator_ImplBase { @Override public void process(JCas jCas) throws AnalysisEngineProcessException { for (BinaryTextRelation relation : Lists.newArrayList(JCasUtil.select( jCas, BinaryTextRelation.class))) { relation.getArg1().removeFromIndexes(); relation.getArg2().removeFromIndexes(); relation.removeFromIndexes(); } } } public static class AddTransitiveContainsRelations extends org.apache.uima.fit.component.JCasAnnotator_ImplBase { @Override public void process(JCas jCas) throws AnalysisEngineProcessException { // collect many-to-many mappings of containment relations Multimap<Annotation, Annotation> isContainedIn = HashMultimap.create(); Multimap<Annotation, Annotation> contains = HashMultimap.create(); Set<BinaryTextRelation> containsRelations = Sets.newHashSet(); for (BinaryTextRelation relation : JCasUtil.select(jCas, BinaryTextRelation.class)) { if (relation.getCategory().equals("CONTAINS")) { containsRelations.add(relation); Annotation arg1 = relation.getArg1().getArgument(); Annotation arg2 = relation.getArg2().getArgument(); contains.put(arg1, arg2); isContainedIn.put(arg2, arg1); } } // look for X -> Y -> Z containment chains and add X -> Z relations Deque<Annotation> todo = new ArrayDeque<>(isContainedIn.keySet()); while (!todo.isEmpty()) { Annotation next = todo.removeFirst(); for (Annotation parent : Lists.newArrayList(isContainedIn.get(next))) { for (Annotation grandParent : Lists.newArrayList(isContainedIn.get(parent))) { if 
(!isContainedIn.containsEntry(next, grandParent)) { isContainedIn.put(next, grandParent); contains.put(grandParent, next); // once X -> Z has been added, we need to re-do all W where W -> X for (Annotation child : contains.get(next)) { todo.add(child); } } } } } // remove old relations for (BinaryTextRelation relation : containsRelations) { relation.getArg1().removeFromIndexes(); relation.getArg2().removeFromIndexes(); relation.removeFromIndexes(); } // add new, transitive relations for (Annotation contained : isContainedIn.keySet()) { for (Annotation container : isContainedIn.get(contained)) { RelationArgument arg1 = new RelationArgument(jCas); arg1.setArgument(container); RelationArgument arg2 = new RelationArgument(jCas); arg2.setArgument(contained); BinaryTextRelation relation = new BinaryTextRelation(jCas); relation.setArg1(arg1); relation.setArg2(arg2); relation.setCategory("CONTAINS"); arg1.addToIndexes(); arg2.addToIndexes(); relation.addToIndexes(); } } } } public static class AddContain2Overlap extends org.apache.uima.fit.component.JCasAnnotator_ImplBase { @Override public void process(JCas jCas) throws AnalysisEngineProcessException { Set<BinaryTextRelation> containsRelations = Sets.newHashSet(); for (BinaryTextRelation relation : JCasUtil.select(jCas, BinaryTextRelation.class)) { if (relation.getCategory().equals("CONTAINS")) { containsRelations.add(relation); } } for (BinaryTextRelation relation : containsRelations) { RelationArgument arg1 = (RelationArgument) relation.getArg1().clone(); RelationArgument arg2 = (RelationArgument) relation.getArg2().clone(); BinaryTextRelation newrelation = new BinaryTextRelation(jCas); newrelation.setArg1(arg1); newrelation.setArg2(arg2); newrelation.setCategory("OVERLAP"); arg1.addToIndexes(); arg2.addToIndexes(); newrelation.addToIndexes(); } } } public static class AddFlippedOverlap extends org.apache.uima.fit.component.JCasAnnotator_ImplBase { @Override public void process(JCas jCas) throws 
AnalysisEngineProcessException { Set<BinaryTextRelation> overlapRelations = Sets.newHashSet(); Multimap<Annotation, Annotation> overlaps = HashMultimap.create(); for (BinaryTextRelation relation : JCasUtil.select(jCas, BinaryTextRelation.class)) { if (relation.getCategory().equals("OVERLAP")) { overlapRelations.add(relation); Annotation arg1 = relation.getArg1().getArgument(); Annotation arg2 = relation.getArg2().getArgument(); overlaps.put(arg1, arg2); } } for (BinaryTextRelation orelation : overlapRelations) { Annotation argA = orelation.getArg1().getArgument(); Annotation argB = orelation.getArg2().getArgument(); //add overlap if (!overlaps.containsEntry(argB, argA)) { //create a new flipped relation: RelationArgument arg1 = new RelationArgument(jCas); arg1.setArgument(argB); RelationArgument arg2 = new RelationArgument(jCas); arg2.setArgument(argA); BinaryTextRelation relation = new BinaryTextRelation(jCas); relation.setArg1(arg1); relation.setArg2(arg2); relation.setCategory("OVERLAP"); arg1.addToIndexes(); arg2.addToIndexes(); relation.addToIndexes(); overlaps.put(argB, argA); } } } } public static class AddClosure extends JCasAnnotator_ImplBase { @Override public void process(JCas jCas) throws AnalysisEngineProcessException { Multimap<List<Annotation>, BinaryTextRelation> annotationsToRelation = HashMultimap.create(); for (BinaryTextRelation relation : JCasUtil.select(jCas, BinaryTextRelation.class)){ String relationType = relation.getCategory(); if(validTemporalType(relationType)){ Annotation arg1 = relation.getArg1().getArgument(); Annotation arg2 = relation.getArg2().getArgument(); annotationsToRelation.put(Arrays.asList(arg1, arg2), relation); } } for (List<Annotation> span: Lists.newArrayList(annotationsToRelation.keySet())){ Collection<BinaryTextRelation> relations = annotationsToRelation.get(span); if(relations.size()>1){//if same span maps to multiple relations Set<String> types = Sets.newHashSet(); for(BinaryTextRelation relation: relations){ 
types.add(relation.getCategory()); } if(types.size()>1){ for(BinaryTextRelation relation: Lists.newArrayList(relations)){ annotationsToRelation.remove(span, relation); relation.getArg1().removeFromIndexes(); relation.getArg2().removeFromIndexes(); relation.removeFromIndexes(); } }else if(types.size()==1){ for (int i =1; i< relations.size(); i++){ BinaryTextRelation relation = (BinaryTextRelation) relations.toArray()[i]; annotationsToRelation.remove(span, relation); relation.getArg1().removeFromIndexes(); relation.getArg2().removeFromIndexes(); relation.removeFromIndexes(); } } } } ArrayList<BinaryTextRelation> temporalRelation = new ArrayList<>(annotationsToRelation.values());//new ArrayList<BinaryTextRelation>(); if (!temporalRelation.isEmpty()){ TLinkTypeArray2 relationArray = new TLinkTypeArray2(temporalRelation, new AnnotationIdCollection(temporalRelation)); int addedCount = 0; for (BinaryTextRelation relation : relationArray.getClosedTlinks(jCas)) { RelationArgument arg1 = relation.getArg1(); RelationArgument arg2 = relation.getArg2(); String relationType = relation.getCategory(); if(relationType.equals("CONTAINED-BY")||relationType.equals("AFTER")){//ignore these two categories, because their reciprocal already exist. 
continue; } //check if the inferred relation new: Collection<BinaryTextRelation> relations = annotationsToRelation.get(Arrays.asList(arg1.getArgument(), arg2.getArgument())); if(relations.isEmpty()){ //if haven't seen this inferred relation before, then add this relation arg1.addToIndexes(); arg2.addToIndexes(); relation.addToIndexes(); addedCount++; } } System.out.println( "**************************************************************"); System.out.println( "Finally added closure relations: " + addedCount ); System.out.println( "**************************************************************"); } } private static boolean validTemporalType(String relationType) { if(relationType.equals("CONTAINS")||relationType.equals("OVERLAP")||relationType.equals("BEFORE")||relationType.equals("ENDS-ON")||relationType.equals("BEGINS-ON")) return true; return false; } } }
googleapis/google-cloud-java
35,959
java-analytics-data/proto-google-analytics-data-v1alpha/src/main/java/com/google/analytics/data/v1alpha/FunnelEventFilter.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/analytics/data/v1alpha/data.proto // Protobuf Java Version: 3.25.8 package com.google.analytics.data.v1alpha; /** * * * <pre> * Creates a filter that matches events of a single event name. If a parameter * filter expression is specified, only the subset of events that match both the * single event name and the parameter filter expressions match this event * filter. * </pre> * * Protobuf type {@code google.analytics.data.v1alpha.FunnelEventFilter} */ public final class FunnelEventFilter extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.analytics.data.v1alpha.FunnelEventFilter) FunnelEventFilterOrBuilder { private static final long serialVersionUID = 0L; // Use FunnelEventFilter.newBuilder() to construct. 
private FunnelEventFilter(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private FunnelEventFilter() { eventName_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new FunnelEventFilter(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.data.v1alpha.ReportingApiProto .internal_static_google_analytics_data_v1alpha_FunnelEventFilter_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.data.v1alpha.ReportingApiProto .internal_static_google_analytics_data_v1alpha_FunnelEventFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.data.v1alpha.FunnelEventFilter.class, com.google.analytics.data.v1alpha.FunnelEventFilter.Builder.class); } private int bitField0_; public static final int EVENT_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object eventName_ = ""; /** * * * <pre> * This filter matches events of this single event name. Event name is * required. * </pre> * * <code>optional string event_name = 1;</code> * * @return Whether the eventName field is set. */ @java.lang.Override public boolean hasEventName() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * This filter matches events of this single event name. Event name is * required. * </pre> * * <code>optional string event_name = 1;</code> * * @return The eventName. */ @java.lang.Override public java.lang.String getEventName() { java.lang.Object ref = eventName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); eventName_ = s; return s; } } /** * * * <pre> * This filter matches events of this single event name. 
Event name is * required. * </pre> * * <code>optional string event_name = 1;</code> * * @return The bytes for eventName. */ @java.lang.Override public com.google.protobuf.ByteString getEventNameBytes() { java.lang.Object ref = eventName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); eventName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FUNNEL_PARAMETER_FILTER_EXPRESSION_FIELD_NUMBER = 2; private com.google.analytics.data.v1alpha.FunnelParameterFilterExpression funnelParameterFilterExpression_; /** * * * <pre> * If specified, this filter matches events that match both the single event * name and the parameter filter expressions. * * Inside the parameter filter expression, only parameter filters are * available. * </pre> * * <code> * optional .google.analytics.data.v1alpha.FunnelParameterFilterExpression funnel_parameter_filter_expression = 2; * </code> * * @return Whether the funnelParameterFilterExpression field is set. */ @java.lang.Override public boolean hasFunnelParameterFilterExpression() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * If specified, this filter matches events that match both the single event * name and the parameter filter expressions. * * Inside the parameter filter expression, only parameter filters are * available. * </pre> * * <code> * optional .google.analytics.data.v1alpha.FunnelParameterFilterExpression funnel_parameter_filter_expression = 2; * </code> * * @return The funnelParameterFilterExpression. */ @java.lang.Override public com.google.analytics.data.v1alpha.FunnelParameterFilterExpression getFunnelParameterFilterExpression() { return funnelParameterFilterExpression_ == null ? 
com.google.analytics.data.v1alpha.FunnelParameterFilterExpression.getDefaultInstance() : funnelParameterFilterExpression_; } /** * * * <pre> * If specified, this filter matches events that match both the single event * name and the parameter filter expressions. * * Inside the parameter filter expression, only parameter filters are * available. * </pre> * * <code> * optional .google.analytics.data.v1alpha.FunnelParameterFilterExpression funnel_parameter_filter_expression = 2; * </code> */ @java.lang.Override public com.google.analytics.data.v1alpha.FunnelParameterFilterExpressionOrBuilder getFunnelParameterFilterExpressionOrBuilder() { return funnelParameterFilterExpression_ == null ? com.google.analytics.data.v1alpha.FunnelParameterFilterExpression.getDefaultInstance() : funnelParameterFilterExpression_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, eventName_); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getFunnelParameterFilterExpression()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, eventName_); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, getFunnelParameterFilterExpression()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { 
if (obj == this) { return true; } if (!(obj instanceof com.google.analytics.data.v1alpha.FunnelEventFilter)) { return super.equals(obj); } com.google.analytics.data.v1alpha.FunnelEventFilter other = (com.google.analytics.data.v1alpha.FunnelEventFilter) obj; if (hasEventName() != other.hasEventName()) return false; if (hasEventName()) { if (!getEventName().equals(other.getEventName())) return false; } if (hasFunnelParameterFilterExpression() != other.hasFunnelParameterFilterExpression()) return false; if (hasFunnelParameterFilterExpression()) { if (!getFunnelParameterFilterExpression().equals(other.getFunnelParameterFilterExpression())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasEventName()) { hash = (37 * hash) + EVENT_NAME_FIELD_NUMBER; hash = (53 * hash) + getEventName().hashCode(); } if (hasFunnelParameterFilterExpression()) { hash = (37 * hash) + FUNNEL_PARAMETER_FILTER_EXPRESSION_FIELD_NUMBER; hash = (53 * hash) + getFunnelParameterFilterExpression().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.analytics.data.v1alpha.FunnelEventFilter parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.data.v1alpha.FunnelEventFilter parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.data.v1alpha.FunnelEventFilter parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.analytics.data.v1alpha.FunnelEventFilter parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.data.v1alpha.FunnelEventFilter parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.analytics.data.v1alpha.FunnelEventFilter parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.analytics.data.v1alpha.FunnelEventFilter parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.data.v1alpha.FunnelEventFilter parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.data.v1alpha.FunnelEventFilter parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.analytics.data.v1alpha.FunnelEventFilter parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.analytics.data.v1alpha.FunnelEventFilter parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.analytics.data.v1alpha.FunnelEventFilter parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.analytics.data.v1alpha.FunnelEventFilter prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Creates a filter that matches events of a single event name. If a parameter * filter expression is specified, only the subset of events that match both the * single event name and the parameter filter expressions match this event * filter. 
* </pre> * * Protobuf type {@code google.analytics.data.v1alpha.FunnelEventFilter} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.analytics.data.v1alpha.FunnelEventFilter) com.google.analytics.data.v1alpha.FunnelEventFilterOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.analytics.data.v1alpha.ReportingApiProto .internal_static_google_analytics_data_v1alpha_FunnelEventFilter_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.analytics.data.v1alpha.ReportingApiProto .internal_static_google_analytics_data_v1alpha_FunnelEventFilter_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.analytics.data.v1alpha.FunnelEventFilter.class, com.google.analytics.data.v1alpha.FunnelEventFilter.Builder.class); } // Construct using com.google.analytics.data.v1alpha.FunnelEventFilter.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getFunnelParameterFilterExpressionFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; eventName_ = ""; funnelParameterFilterExpression_ = null; if (funnelParameterFilterExpressionBuilder_ != null) { funnelParameterFilterExpressionBuilder_.dispose(); funnelParameterFilterExpressionBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.analytics.data.v1alpha.ReportingApiProto .internal_static_google_analytics_data_v1alpha_FunnelEventFilter_descriptor; } 
@java.lang.Override public com.google.analytics.data.v1alpha.FunnelEventFilter getDefaultInstanceForType() { return com.google.analytics.data.v1alpha.FunnelEventFilter.getDefaultInstance(); } @java.lang.Override public com.google.analytics.data.v1alpha.FunnelEventFilter build() { com.google.analytics.data.v1alpha.FunnelEventFilter result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.analytics.data.v1alpha.FunnelEventFilter buildPartial() { com.google.analytics.data.v1alpha.FunnelEventFilter result = new com.google.analytics.data.v1alpha.FunnelEventFilter(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.analytics.data.v1alpha.FunnelEventFilter result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.eventName_ = eventName_; to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.funnelParameterFilterExpression_ = funnelParameterFilterExpressionBuilder_ == null ? 
funnelParameterFilterExpression_ : funnelParameterFilterExpressionBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.analytics.data.v1alpha.FunnelEventFilter) { return mergeFrom((com.google.analytics.data.v1alpha.FunnelEventFilter) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.analytics.data.v1alpha.FunnelEventFilter other) { if (other == com.google.analytics.data.v1alpha.FunnelEventFilter.getDefaultInstance()) return this; if (other.hasEventName()) { eventName_ = other.eventName_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasFunnelParameterFilterExpression()) { mergeFunnelParameterFilterExpression(other.getFunnelParameterFilterExpression()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { eventName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getFunnelParameterFilterExpressionFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object eventName_ = ""; /** * * * <pre> * This filter matches events of this single event name. Event name is * required. * </pre> * * <code>optional string event_name = 1;</code> * * @return Whether the eventName field is set. */ public boolean hasEventName() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * This filter matches events of this single event name. Event name is * required. * </pre> * * <code>optional string event_name = 1;</code> * * @return The eventName. */ public java.lang.String getEventName() { java.lang.Object ref = eventName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); eventName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * This filter matches events of this single event name. Event name is * required. * </pre> * * <code>optional string event_name = 1;</code> * * @return The bytes for eventName. 
*/ public com.google.protobuf.ByteString getEventNameBytes() { java.lang.Object ref = eventName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); eventName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * This filter matches events of this single event name. Event name is * required. * </pre> * * <code>optional string event_name = 1;</code> * * @param value The eventName to set. * @return This builder for chaining. */ public Builder setEventName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } eventName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * This filter matches events of this single event name. Event name is * required. * </pre> * * <code>optional string event_name = 1;</code> * * @return This builder for chaining. */ public Builder clearEventName() { eventName_ = getDefaultInstance().getEventName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * This filter matches events of this single event name. Event name is * required. * </pre> * * <code>optional string event_name = 1;</code> * * @param value The bytes for eventName to set. * @return This builder for chaining. 
*/ public Builder setEventNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); eventName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.analytics.data.v1alpha.FunnelParameterFilterExpression funnelParameterFilterExpression_; private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.data.v1alpha.FunnelParameterFilterExpression, com.google.analytics.data.v1alpha.FunnelParameterFilterExpression.Builder, com.google.analytics.data.v1alpha.FunnelParameterFilterExpressionOrBuilder> funnelParameterFilterExpressionBuilder_; /** * * * <pre> * If specified, this filter matches events that match both the single event * name and the parameter filter expressions. * * Inside the parameter filter expression, only parameter filters are * available. * </pre> * * <code> * optional .google.analytics.data.v1alpha.FunnelParameterFilterExpression funnel_parameter_filter_expression = 2; * </code> * * @return Whether the funnelParameterFilterExpression field is set. */ public boolean hasFunnelParameterFilterExpression() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * If specified, this filter matches events that match both the single event * name and the parameter filter expressions. * * Inside the parameter filter expression, only parameter filters are * available. * </pre> * * <code> * optional .google.analytics.data.v1alpha.FunnelParameterFilterExpression funnel_parameter_filter_expression = 2; * </code> * * @return The funnelParameterFilterExpression. */ public com.google.analytics.data.v1alpha.FunnelParameterFilterExpression getFunnelParameterFilterExpression() { if (funnelParameterFilterExpressionBuilder_ == null) { return funnelParameterFilterExpression_ == null ? 
com.google.analytics.data.v1alpha.FunnelParameterFilterExpression.getDefaultInstance() : funnelParameterFilterExpression_; } else { return funnelParameterFilterExpressionBuilder_.getMessage(); } } /** * * * <pre> * If specified, this filter matches events that match both the single event * name and the parameter filter expressions. * * Inside the parameter filter expression, only parameter filters are * available. * </pre> * * <code> * optional .google.analytics.data.v1alpha.FunnelParameterFilterExpression funnel_parameter_filter_expression = 2; * </code> */ public Builder setFunnelParameterFilterExpression( com.google.analytics.data.v1alpha.FunnelParameterFilterExpression value) { if (funnelParameterFilterExpressionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } funnelParameterFilterExpression_ = value; } else { funnelParameterFilterExpressionBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * If specified, this filter matches events that match both the single event * name and the parameter filter expressions. * * Inside the parameter filter expression, only parameter filters are * available. * </pre> * * <code> * optional .google.analytics.data.v1alpha.FunnelParameterFilterExpression funnel_parameter_filter_expression = 2; * </code> */ public Builder setFunnelParameterFilterExpression( com.google.analytics.data.v1alpha.FunnelParameterFilterExpression.Builder builderForValue) { if (funnelParameterFilterExpressionBuilder_ == null) { funnelParameterFilterExpression_ = builderForValue.build(); } else { funnelParameterFilterExpressionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * If specified, this filter matches events that match both the single event * name and the parameter filter expressions. * * Inside the parameter filter expression, only parameter filters are * available. 
* </pre> * * <code> * optional .google.analytics.data.v1alpha.FunnelParameterFilterExpression funnel_parameter_filter_expression = 2; * </code> */ public Builder mergeFunnelParameterFilterExpression( com.google.analytics.data.v1alpha.FunnelParameterFilterExpression value) { if (funnelParameterFilterExpressionBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && funnelParameterFilterExpression_ != null && funnelParameterFilterExpression_ != com.google.analytics.data.v1alpha.FunnelParameterFilterExpression .getDefaultInstance()) { getFunnelParameterFilterExpressionBuilder().mergeFrom(value); } else { funnelParameterFilterExpression_ = value; } } else { funnelParameterFilterExpressionBuilder_.mergeFrom(value); } if (funnelParameterFilterExpression_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * If specified, this filter matches events that match both the single event * name and the parameter filter expressions. * * Inside the parameter filter expression, only parameter filters are * available. * </pre> * * <code> * optional .google.analytics.data.v1alpha.FunnelParameterFilterExpression funnel_parameter_filter_expression = 2; * </code> */ public Builder clearFunnelParameterFilterExpression() { bitField0_ = (bitField0_ & ~0x00000002); funnelParameterFilterExpression_ = null; if (funnelParameterFilterExpressionBuilder_ != null) { funnelParameterFilterExpressionBuilder_.dispose(); funnelParameterFilterExpressionBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * If specified, this filter matches events that match both the single event * name and the parameter filter expressions. * * Inside the parameter filter expression, only parameter filters are * available. 
* </pre> * * <code> * optional .google.analytics.data.v1alpha.FunnelParameterFilterExpression funnel_parameter_filter_expression = 2; * </code> */ public com.google.analytics.data.v1alpha.FunnelParameterFilterExpression.Builder getFunnelParameterFilterExpressionBuilder() { bitField0_ |= 0x00000002; onChanged(); return getFunnelParameterFilterExpressionFieldBuilder().getBuilder(); } /** * * * <pre> * If specified, this filter matches events that match both the single event * name and the parameter filter expressions. * * Inside the parameter filter expression, only parameter filters are * available. * </pre> * * <code> * optional .google.analytics.data.v1alpha.FunnelParameterFilterExpression funnel_parameter_filter_expression = 2; * </code> */ public com.google.analytics.data.v1alpha.FunnelParameterFilterExpressionOrBuilder getFunnelParameterFilterExpressionOrBuilder() { if (funnelParameterFilterExpressionBuilder_ != null) { return funnelParameterFilterExpressionBuilder_.getMessageOrBuilder(); } else { return funnelParameterFilterExpression_ == null ? com.google.analytics.data.v1alpha.FunnelParameterFilterExpression.getDefaultInstance() : funnelParameterFilterExpression_; } } /** * * * <pre> * If specified, this filter matches events that match both the single event * name and the parameter filter expressions. * * Inside the parameter filter expression, only parameter filters are * available. 
* </pre> * * <code> * optional .google.analytics.data.v1alpha.FunnelParameterFilterExpression funnel_parameter_filter_expression = 2; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.data.v1alpha.FunnelParameterFilterExpression, com.google.analytics.data.v1alpha.FunnelParameterFilterExpression.Builder, com.google.analytics.data.v1alpha.FunnelParameterFilterExpressionOrBuilder> getFunnelParameterFilterExpressionFieldBuilder() { if (funnelParameterFilterExpressionBuilder_ == null) { funnelParameterFilterExpressionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.analytics.data.v1alpha.FunnelParameterFilterExpression, com.google.analytics.data.v1alpha.FunnelParameterFilterExpression.Builder, com.google.analytics.data.v1alpha.FunnelParameterFilterExpressionOrBuilder>( getFunnelParameterFilterExpression(), getParentForChildren(), isClean()); funnelParameterFilterExpression_ = null; } return funnelParameterFilterExpressionBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.analytics.data.v1alpha.FunnelEventFilter) } // @@protoc_insertion_point(class_scope:google.analytics.data.v1alpha.FunnelEventFilter) private static final com.google.analytics.data.v1alpha.FunnelEventFilter DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.analytics.data.v1alpha.FunnelEventFilter(); } public static com.google.analytics.data.v1alpha.FunnelEventFilter getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<FunnelEventFilter> PARSER = new com.google.protobuf.AbstractParser<FunnelEventFilter>() { @java.lang.Override public FunnelEventFilter 
parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<FunnelEventFilter> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<FunnelEventFilter> getParserForType() { return PARSER; } @java.lang.Override public com.google.analytics.data.v1alpha.FunnelEventFilter getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/ozone
36,144
hadoop-ozone/recon/src/main/java/org/apache/hadoop/ozone/recon/ReconUtils.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.ozone.recon; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_EVENT_CONTAINER_REPORT_QUEUE_SIZE_DEFAULT; import static org.apache.hadoop.hdds.scm.ScmConfigKeys.OZONE_SCM_EVENT_THREAD_POOL_SIZE_DEFAULT; import static org.apache.hadoop.hdds.server.ServerUtils.getDirectoryFromConfig; import static org.apache.hadoop.hdds.server.ServerUtils.getOzoneMetaDirPath; import static org.apache.hadoop.ozone.OzoneConsts.OM_KEY_PREFIX; import static org.apache.hadoop.ozone.recon.ReconServerConfigKeys.OZONE_RECON_SCM_DB_DIR; import static org.jooq.impl.DSL.currentTimestamp; import static org.jooq.impl.DSL.select; import static org.jooq.impl.DSL.using; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Preconditions; import com.google.inject.Singleton; import jakarta.annotation.Nonnull; import java.io.File; import java.io.IOException; import java.net.HttpURLConnection; import java.net.URL; import java.nio.file.Path; import java.sql.Timestamp; import java.text.ParseException; import java.text.SimpleDateFormat; import java.time.Instant; import java.util.ArrayList; import java.util.Collections; import java.util.Date; import 
java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Set; import java.util.TimeZone; import java.util.concurrent.BlockingQueue; import java.util.stream.Collectors; import javax.ws.rs.core.Response; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.StringUtils; import org.apache.hadoop.hdds.HddsConfigKeys; import org.apache.hadoop.hdds.conf.ConfigurationSource; import org.apache.hadoop.hdds.conf.OzoneConfiguration; import org.apache.hadoop.hdds.scm.ScmUtils; import org.apache.hadoop.hdds.scm.ha.SCMNodeDetails; import org.apache.hadoop.hdds.scm.server.OzoneStorageContainerManager; import org.apache.hadoop.hdds.scm.server.SCMDatanodeHeartbeatDispatcher; import org.apache.hadoop.hdds.utils.Archiver; import org.apache.hadoop.hdds.utils.HddsServerUtil; import org.apache.hadoop.hdds.utils.db.Table; import org.apache.hadoop.hdds.utils.db.TableIterator; import org.apache.hadoop.hdfs.web.URLConnectionFactory; import org.apache.hadoop.ozone.OmUtils; import org.apache.hadoop.ozone.OzoneConsts; import org.apache.hadoop.ozone.om.helpers.BucketLayout; import org.apache.hadoop.ozone.om.helpers.OmBucketInfo; import org.apache.hadoop.ozone.om.helpers.OmDirectoryInfo; import org.apache.hadoop.ozone.om.helpers.OmKeyInfo; import org.apache.hadoop.ozone.recon.api.handlers.BucketHandler; import org.apache.hadoop.ozone.recon.api.handlers.EntityHandler; import org.apache.hadoop.ozone.recon.api.types.DUResponse; import org.apache.hadoop.ozone.recon.api.types.NSSummary; import org.apache.hadoop.ozone.recon.recovery.ReconOMMetadataManager; import org.apache.hadoop.ozone.recon.scm.ReconContainerReportQueue; import org.apache.hadoop.ozone.recon.spi.ReconNamespaceSummaryManager; import org.apache.hadoop.security.authentication.client.AuthenticationException; import org.apache.ozone.recon.schema.generated.tables.daos.GlobalStatsDao; import org.apache.ozone.recon.schema.generated.tables.pojos.GlobalStats; import org.jooq.Configuration; import 
org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * Recon Utility class.
 */
@Singleton
public class ReconUtils {

  // Deliberately non-final: tests may swap in a mock logger via setLogger().
  private static Logger log = LoggerFactory.getLogger(ReconUtils.class);

  public ReconUtils() {
  }

  /**
   * Get the current rebuild state of NSSummary tree.
   * Delegates to NSSummaryTask's unified control mechanism.
   *
   * @return current RebuildState from NSSummaryTask
   */
  public static org.apache.hadoop.ozone.recon.tasks.NSSummaryTask.RebuildState
      getNSSummaryRebuildState() {
    return org.apache.hadoop.ozone.recon.tasks.NSSummaryTask.getRebuildState();
  }

  /**
   * Resolve the directory used for Recon's SCM RocksDB instance.
   *
   * @param conf configuration bag
   * @return directory configured under {@code OZONE_RECON_SCM_DB_DIR}, or the
   *         ozone metadata dir fallback.
   */
  public static File getReconScmDbDir(ConfigurationSource conf) {
    return new ReconUtils().getReconDbDir(conf, OZONE_RECON_SCM_DB_DIR);
  }

  /**
   * Build one bounded container-report queue per handler thread.
   *
   * @param configuration configuration to read thread-pool and queue sizes from.
   * @return list with one {@link ReconContainerReportQueue} per handler thread.
   */
  @Nonnull
  public static List<BlockingQueue<SCMDatanodeHeartbeatDispatcher.ContainerReport>>
      initContainerReportQueue(OzoneConfiguration configuration) {
    int threadPoolSize = configuration.getInt(
        ScmUtils.getContainerReportConfPrefix() + ".thread.pool.size",
        OZONE_SCM_EVENT_THREAD_POOL_SIZE_DEFAULT);
    int queueSize = configuration.getInt(
        ScmUtils.getContainerReportConfPrefix() + ".queue.size",
        OZONE_SCM_EVENT_CONTAINER_REPORT_QUEUE_SIZE_DEFAULT);
    List<BlockingQueue<SCMDatanodeHeartbeatDispatcher.ContainerReport>> queues =
        new ArrayList<>();
    for (int i = 0; i < threadPoolSize; ++i) {
      queues.add(new ReconContainerReportQueue(queueSize));
    }
    return queues;
  }

  /**
   * Get configured Recon DB directory value based on config. If not present,
   * fallback to ozone.metadata.dirs
   *
   * @param conf configuration bag
   * @param dirConfigKey key to check
   * @return Return File based on configured or fallback value.
   */
  public File getReconDbDir(ConfigurationSource conf, String dirConfigKey) {
    File metadataDir = getDirectoryFromConfig(conf, dirConfigKey, "Recon");
    if (metadataDir != null) {
      return metadataDir;
    }
    log.warn("{} is not configured. We recommend adding this setting. " +
            "Falling back to {} instead.",
        dirConfigKey, HddsConfigKeys.OZONE_METADATA_DIRS);
    return getOzoneMetaDirPath(conf);
  }

  /**
   * Given a source directory, create a tar file from it.
   *
   * @param sourcePath the path to the directory to be archived.
   * @return tar file
   */
  public static File createTarFile(Path sourcePath) throws IOException {
    String source = StringUtils.removeEnd(sourcePath.toString(), "/");
    File tarFile = new File(source.concat(".tar"));
    Archiver.create(tarFile, sourcePath);
    return tarFile;
  }

  /**
   * Untar DB snapshot tar file to recon OM snapshot directory.
   *
   * @param tarFile source tar file
   * @param destPath destination path to untar to.
   * @throws IOException ioException
   */
  public void untarCheckpointFile(File tarFile, Path destPath)
      throws IOException {
    Archiver.extract(tarFile, destPath);
  }

  /**
   * Constructs the full path of a key from its OmKeyInfo using a bottom-up
   * approach, starting from the leaf node.
   * <p>
   * The method begins with the leaf node (the key itself) and recursively
   * prepends parent directory names, fetched via NSSummary objects, until
   * reaching the parent bucket (parentId is -1). It effectively builds the
   * path from bottom to top, finally prepending the volume and bucket names
   * to complete the full path. If the directory structure is currently being
   * rebuilt (indicated by the rebuildTriggered flag), this method returns an
   * empty string to signify that path construction is temporarily unavailable.
   *
   * @param omKeyInfo The OmKeyInfo object for the key
   * @return The constructed full path of the key as a String, or an empty
   *         string if a rebuild is in progress and the path cannot be
   *         constructed at this time.
   * @throws IOException if NSSummary lookups fail.
   */
  public static String constructFullPath(OmKeyInfo omKeyInfo,
      ReconNamespaceSummaryManager reconNamespaceSummaryManager)
      throws IOException {
    return constructFullPath(omKeyInfo.getKeyName(),
        omKeyInfo.getParentObjectID(), omKeyInfo.getVolumeName(),
        omKeyInfo.getBucketName(), reconNamespaceSummaryManager);
  }

  /**
   * Constructs the full path of a key from its key name and parent ID using a
   * bottom-up approach, starting from the leaf node.
   * <p>
   * See {@link #constructFullPathPrefix} for the traversal details; this
   * method appends the key name to the computed prefix.
   *
   * @param keyName The name of the key
   * @param initialParentId The parent ID of the key
   * @param volumeName The name of the volume
   * @param bucketName The name of the bucket
   * @return The constructed full path of the key as a String, or an empty
   *         string if a rebuild is in progress and the path cannot be
   *         constructed at this time.
   * @throws IOException if NSSummary lookups fail.
   */
  public static String constructFullPath(String keyName, long initialParentId,
      String volumeName, String bucketName,
      ReconNamespaceSummaryManager reconNamespaceSummaryManager)
      throws IOException {
    StringBuilder fullPath = constructFullPathPrefix(initialParentId,
        volumeName, bucketName, reconNamespaceSummaryManager);
    if (fullPath.length() == 0) {
      // Empty prefix signals that the NSSummary tree is mid-rebuild.
      return "";
    }
    fullPath.append(keyName);
    return fullPath.toString();
  }

  /**
   * Constructs the prefix path to a key from its key name and parent ID using
   * a bottom-up approach, starting from the leaf node.
   * <p>
   * The method recursively prepends parent directory names, fetched via
   * NSSummary objects, until reaching the parent bucket, then prepends the
   * volume and bucket names. If the directory structure is currently being
   * rebuilt, an empty StringBuilder is returned to signify that path
   * construction is temporarily unavailable.
   *
   * @param initialParentId The parent ID of the key
   * @param volumeName The name of the volume
   * @param bucketName The name of the bucket
   * @return A StringBuilder containing the constructed prefix path of the key,
   *         or an empty string builder if a rebuild is in progress.
   * @throws IOException if NSSummary lookups fail.
   */
  public static StringBuilder constructFullPathPrefix(long initialParentId,
      String volumeName, String bucketName,
      ReconNamespaceSummaryManager reconNamespaceSummaryManager)
      throws IOException {
    StringBuilder fullPath = new StringBuilder();
    long parentId = initialParentId;
    boolean isDirectoryPresent = false;
    List<String> pathSegments = new ArrayList<>();

    while (parentId != 0) {
      NSSummary nsSummary =
          reconNamespaceSummaryManager.getNSSummary(parentId);
      if (nsSummary == null) {
        log.warn("NSSummary tree is currently being rebuilt or the directory " +
            "could be in the progress of deletion, returning empty string " +
            "for path construction.");
        fullPath.setLength(0);
        return fullPath;
      }
      // On the last pass, dir-name will be empty and parent will be zero,
      // indicating the loop should end.
      if (!nsSummary.getDirName().isEmpty()) {
        pathSegments.add(nsSummary.getDirName());
      }
      // Move to the parent ID of the current directory
      parentId = nsSummary.getParentId();
      isDirectoryPresent = true;
    }

    fullPath.append(volumeName).append(OM_KEY_PREFIX)
        .append(bucketName).append(OM_KEY_PREFIX);

    // Segments were collected leaf-first; append them root-first.
    for (int i = pathSegments.size() - 1; i >= 0; i--) {
      fullPath.append(pathSegments.get(i)).append(OM_KEY_PREFIX);
    }

    // TODO - why is this needed? It seems like it should handle double slashes
    // in the path name, but it's not clear how they get there. This normalize
    // call is quite expensive as it creates several objects (URI, PATH, back
    // to string). There was a bug fixed above where the last parent dirName
    // was empty, which always caused a double // after the bucket name, but
    // with that fixed, it seems like this should not be needed. All tests
    // pass without it for key listing.
    if (isDirectoryPresent) {
      if (fullPath.indexOf("//") >= 0) {
        String path = fullPath.toString();
        fullPath.setLength(0);
        fullPath.append(OmUtils.normalizeKey(path, true));
      }
    }
    return fullPath;
  }

  /**
   * Converts a key prefix into an object path for FSO buckets, using IDs.
   *
   * This method transforms a user-provided path (e.g., "volume/bucket/dir1")
   * into a database-friendly format ("/volumeID/bucketID/ParentId/") by
   * replacing names with their corresponding IDs. It simplifies database
   * queries for FSO bucket operations.
   * <pre>
   * {@code
   * Examples:
   * - Input: "volume/bucket/key"      -> Output: "/volumeID/bucketID/parentDirID/key"
   * - Input: "volume/bucket/dir1"     -> Output: "/volumeID/bucketID/dir1ID/"
   * - Input: "volume/bucket/dir1/key1" -> Output: "/volumeID/bucketID/dir1ID/key1"
   * - Input: "volume/bucket/dir1/dir2" -> Output: "/volumeID/bucketID/dir2ID/"
   * }
   * </pre>
   * @param prevKeyPrefix The path to be converted.
   * @return The object path as "/volumeID/bucketID/ParentId/" or the original
   *         prefix if it cannot be resolved.
   * @throws IOException If database access fails.
   * @throws IllegalArgumentException If the provided path is invalid or
   *         cannot be converted.
   */
  public static String convertToObjectPathForOpenKeySearch(String prevKeyPrefix,
      ReconOMMetadataManager omMetadataManager,
      ReconNamespaceSummaryManager reconNamespaceSummaryManager,
      OzoneStorageContainerManager reconSCM) throws IOException {
    try {
      String[] names = EntityHandler.parseRequestPath(
          EntityHandler.normalizePath(prevKeyPrefix,
              BucketLayout.FILE_SYSTEM_OPTIMIZED));
      Table<String, OmKeyInfo> openFileTable =
          omMetadataManager.getOpenKeyTable(
              BucketLayout.FILE_SYSTEM_OPTIMIZED);

      // Root-Level: Return the original path
      if (names.length == 0 || names[0].isEmpty()) {
        return prevKeyPrefix;
      }

      // Volume-Level: Fetch the volumeID
      String volumeName = names[0];
      validateNames(volumeName);
      String volumeKey = omMetadataManager.getVolumeKey(volumeName);
      long volumeId = omMetadataManager.getVolumeTable()
          .getSkipCache(volumeKey).getObjectID();
      if (names.length == 1) {
        return constructObjectPathWithPrefix(volumeId);
      }

      // Bucket-Level: Fetch the bucketID
      String bucketName = names[1];
      validateNames(bucketName);
      String bucketKey = omMetadataManager.getBucketKey(volumeName, bucketName);
      OmBucketInfo bucketInfo =
          omMetadataManager.getBucketTable().getSkipCache(bucketKey);
      long bucketId = bucketInfo.getObjectID();
      if (names.length == 2 ||
          bucketInfo.getBucketLayout() != BucketLayout.FILE_SYSTEM_OPTIMIZED) {
        return constructObjectPathWithPrefix(volumeId, bucketId);
      }

      // Directory or Key-Level: Check both key and directory
      BucketHandler handler = BucketHandler.getBucketHandler(
          reconNamespaceSummaryManager, omMetadataManager, reconSCM,
          bucketInfo);

      if (names.length >= 3) {
        String lastEntity = names[names.length - 1];

        // Check if the directory exists
        OmDirectoryInfo dirInfo = handler.getDirInfo(names);
        if (dirInfo != null && dirInfo.getName().equals(lastEntity)) {
          return constructObjectPathWithPrefix(volumeId, bucketId,
              dirInfo.getObjectID()) + OM_KEY_PREFIX;
        }

        // Check if the key exists
        long dirID = handler.getDirObjectId(names, names.length);
        String keyKey = constructObjectPathWithPrefix(volumeId, bucketId,
            dirID) + OM_KEY_PREFIX + lastEntity;
        OmKeyInfo keyInfo = openFileTable.getSkipCache(keyKey);
        if (keyInfo != null && keyInfo.getFileName().equals(lastEntity)) {
          return constructObjectPathWithPrefix(volumeId, bucketId,
              keyInfo.getParentObjectID()) + OM_KEY_PREFIX + lastEntity;
        }

        return prevKeyPrefix;
      }
    } catch (IllegalArgumentException e) {
      log.error(
          "IllegalArgumentException encountered while converting key prefix to object path: {}",
          prevKeyPrefix, e);
      throw e;
    } catch (RuntimeException e) {
      log.error(
          "RuntimeException encountered while converting key prefix to object path: {}",
          prevKeyPrefix, e);
      return prevKeyPrefix;
    }
    return prevKeyPrefix;
  }

  /**
   * Make HTTP GET call on the URL and return HttpURLConnection instance.
   *
   * @param connectionFactory URLConnectionFactory to use.
   * @param url url to call
   * @param isSpnego is SPNEGO enabled
   * @return HttpURLConnection instance of the HTTP call.
   * @throws IOException While reading the response,
   * @throws AuthenticationException
   */
  public HttpURLConnection makeHttpCall(URLConnectionFactory connectionFactory,
      String url, boolean isSpnego) throws IOException,
      AuthenticationException {
    HttpURLConnection urlConnection = (HttpURLConnection)
        connectionFactory.openConnection(new URL(url), isSpnego);
    urlConnection.connect();
    return urlConnection;
  }

  /**
   * Load last known DB in Recon. Snapshot files are named
   * {@code <prefix>_<timestamp>}; the file with the largest timestamp wins.
   * Older snapshots and unparseable files are deleted as they are
   * encountered.
   *
   * @param reconDbDir directory to scan (may be null).
   * @param fileNamePrefix prefix that snapshot files must start with.
   * @return latest snapshot file, or null if none found.
   */
  public File getLastKnownDB(File reconDbDir, String fileNamePrefix) {
    String lastKnownSnapshotFileName = null;
    File lastKnownSnapshotFile = null;
    long lastKnownSnapshotTs = Long.MIN_VALUE;
    if (reconDbDir != null) {
      File[] snapshotFiles = reconDbDir.listFiles((dir, name) ->
          name.startsWith(fileNamePrefix));
      if (snapshotFiles != null) {
        for (File snapshotFile : snapshotFiles) {
          String fileName = snapshotFile.getName();
          try {
            String[] fileNameSplits = fileName.split("_");
            if (fileNameSplits.length <= 1) {
              continue;
            }
            long snapshotTimestamp = Long.parseLong(fileNameSplits[1]);
            if (lastKnownSnapshotTs < snapshotTimestamp) {
              if (lastKnownSnapshotFile != null) {
                try {
                  // Keep only the newest snapshot on disk.
                  FileUtils.forceDelete(lastKnownSnapshotFile);
                } catch (IOException e) {
                  // Pass the exception so the stack trace is logged.
                  log.warn("Error deleting existing om db snapshot directory: {}",
                      lastKnownSnapshotFile.getAbsolutePath(), e);
                }
              }
              lastKnownSnapshotTs = snapshotTimestamp;
              lastKnownSnapshotFileName = fileName;
              lastKnownSnapshotFile = snapshotFile;
            }
          } catch (NumberFormatException nfEx) {
            log.warn("Unknown file found in Recon DB dir : {}", fileName);
            FileUtils.deleteQuietly(snapshotFile);
          }
        }
      }
    }
    return lastKnownSnapshotFileName == null ? null :
        new File(reconDbDir.getPath(), lastKnownSnapshotFileName);
  }

  /**
   * Upsert row in GlobalStats table.
   *
   * @param sqlConfiguration jOOQ configuration for the DB connection.
   * @param globalStatsDao DAO for the GlobalStats table.
   * @param key stat key to upsert.
   * @param count new value for the stat.
   */
  public static void upsertGlobalStatsTable(Configuration sqlConfiguration,
      GlobalStatsDao globalStatsDao, String key, Long count) {
    // Get the current timestamp from the DB so all rows share one clock.
    Timestamp now =
        using(sqlConfiguration).fetchValue(select(currentTimestamp()));
    GlobalStats existingRecord = globalStatsDao.fetchOneByKey(key);
    GlobalStats newRecord = new GlobalStats(key, count, now);

    // Insert a new record for key if it does not exist
    if (existingRecord == null) {
      globalStatsDao.insert(newRecord);
    } else {
      globalStatsDao.update(newRecord);
    }
  }

  /**
   * Converts Unix numeric permissions into a symbolic representation.
   * @param numericPermissions The numeric string, e.g., "750".
   * @return The symbolic representation, e.g., "rwxr-x---".
   */
  public static String convertNumericToSymbolic(String numericPermissions) {
    int owner = Character.getNumericValue(numericPermissions.charAt(0));
    int group = Character.getNumericValue(numericPermissions.charAt(1));
    int others = Character.getNumericValue(numericPermissions.charAt(2));

    return String.format("%s%s%s",
        convertToSymbolicPermission(owner),
        convertToSymbolicPermission(group),
        convertToSymbolicPermission(others));
  }

  /**
   * Converts a single digit Unix permission into a symbolic representation.
   * @param permission The permission digit (0-7).
   * @return The symbolic representation for the digit.
   */
  public static String convertToSymbolicPermission(int permission) {
    // Index i encodes the rwx bits of i (r=4, w=2, x=1).
    String[] symbols = {"---", "--x", "-w-", "-wx", "r--", "r-x", "rw-", "rwx"};
    return symbols[permission];
  }

  /**
   * Sorts a list of DiskUsage objects in descending order by size using
   * parallel sorting and returns the top N records as specified by the limit.
   *
   * This method is optimized for large datasets and utilizes parallel
   * processing to efficiently sort and retrieve the top N largest records by
   * size. It's especially useful for reducing processing time and memory
   * usage when only a subset of sorted records is needed.
   *
   * @param diskUsageList the list of DiskUsage objects to be sorted.
   * @param limit the maximum number of DiskUsage objects to return.
   * @return a list of the top N DiskUsage objects sorted in descending order
   *         by size, where N is the specified limit.
   */
  public static List<DUResponse.DiskUsage> sortDiskUsageDescendingWithLimit(
      List<DUResponse.DiskUsage> diskUsageList, int limit) {
    return diskUsageList.parallelStream()
        .sorted((du1, du2) -> Long.compare(du2.getSize(), du1.getSize()))
        .limit(limit)
        .collect(Collectors.toList());
  }

  /**
   * Returns the upper bound of the file-size bin that {@code fileSize} falls
   * into, or Long.MAX_VALUE when it exceeds the tracked range.
   */
  public static long getFileSizeUpperBound(long fileSize) {
    if (fileSize >= ReconConstants.MAX_FILE_SIZE_UPPER_BOUND) {
      return Long.MAX_VALUE;
    }
    // The smallest file size being tracked for count
    // is 1 KB i.e. 1024 = 2 ^ 10.
    int binIndex = getFileSizeBinIndex(fileSize);
    return (long) Math.pow(2, (10 + binIndex));
  }

  /**
   * Returns the upper bound of the container-size bin that
   * {@code containerSize} falls into, or Long.MAX_VALUE when it exceeds the
   * tracked range.
   */
  public static long getContainerSizeUpperBound(long containerSize) {
    if (containerSize >= ReconConstants.MAX_CONTAINER_SIZE_UPPER_BOUND) {
      return Long.MAX_VALUE;
    }
    // The smallest container size being tracked for count
    // is 512MB i.e. 536870912L = 2 ^ 29.
    int binIndex = getContainerSizeBinIndex(containerSize);
    return (long) Math.pow(2, (29 + binIndex));
  }

  /**
   * Maps a file size to its histogram bin index. Sizes below 1 KB map to bin
   * 0; sizes at or above the tracked maximum map to the last bin.
   */
  public static int getFileSizeBinIndex(long fileSize) {
    Preconditions.checkArgument(fileSize >= 0,
        "fileSize = %s < 0", fileSize);
    // if the file size is larger than our track scope,
    // we map it to the last bin
    if (fileSize >= ReconConstants.MAX_FILE_SIZE_UPPER_BOUND) {
      return ReconConstants.NUM_OF_FILE_SIZE_BINS - 1;
    }
    int index = nextClosestPowerIndexOfTwo(fileSize);
    // if the file size is smaller than our track scope,
    // we map it to the first bin
    return index < 10 ? 0 : index - 10;
  }

  /**
   * Maps a container size to its histogram bin index. Sizes below 512 MB map
   * to bin 0; sizes at or above the tracked maximum map to the last bin.
   */
  public static int getContainerSizeBinIndex(long containerSize) {
    Preconditions.checkArgument(containerSize >= 0,
        "containerSize = %s < 0", containerSize);
    // if the container size is larger than our track scope,
    // we map it to the last bin
    if (containerSize >= ReconConstants.MAX_CONTAINER_SIZE_UPPER_BOUND) {
      return ReconConstants.NUM_OF_CONTAINER_SIZE_BINS - 1;
    }
    int index = nextClosestPowerIndexOfTwo(containerSize);
    // if the container size is smaller than our track scope,
    // we map it to the first bin
    return index < 29 ? 0 : index - 29;
  }

  /**
   * Returns k such that 2^k is the smallest power of two >= n
   * (negative k for fractional thresholds when n is negative).
   */
  static int nextClosestPowerIndexOfTwo(long n) {
    return n > 0 ? 64 - Long.numberOfLeadingZeros(n - 1)
        : n == 0 ? 0
        : n == Long.MIN_VALUE ? -63
        : -nextClosestPowerIndexOfTwo(-n);
  }

  /**
   * Builds the SCMNodeDetails that Recon registers itself with.
   *
   * @param conf configuration to read the datanode bind address from.
   * @return node details with id "Recon".
   */
  public SCMNodeDetails getReconNodeDetails(OzoneConfiguration conf) {
    SCMNodeDetails.Builder builder = new SCMNodeDetails.Builder();
    builder.setSCMNodeId("Recon");
    builder.setDatanodeProtocolServerAddress(
        HddsServerUtil.getReconDataNodeBindAddress(conf));
    return builder.build();
  }

  @VisibleForTesting
  public static void setLogger(Logger logger) {
    log = logger;
  }

  /**
   * Return if all OMDB tables that will be used are initialized.
   * @return if tables are initialized
   */
  public static boolean isInitializationComplete(
      ReconOMMetadataManager omMetadataManager) {
    if (omMetadataManager == null) {
      return false;
    }
    return omMetadataManager.getVolumeTable() != null
        && omMetadataManager.getBucketTable() != null
        && omMetadataManager.getDirectoryTable() != null
        && omMetadataManager.getFileTable() != null
        && omMetadataManager.getKeyTable(BucketLayout.LEGACY) != null;
  }

  /**
   * Converts string date in a provided format to server timezone's epoch
   * milliseconds. Falls back to the current time when the input is empty or
   * unparseable; falls back to "MM-dd-yyyy HH:mm:ss" / the default timezone
   * when format or timezone are absent.
   *
   * @param dateString date string to parse.
   * @param dateFormat SimpleDateFormat pattern, may be empty.
   * @param timeZone timezone to interpret the date in, may be null.
   * @return the epoch milliseconds representation of the date.
   */
  public static long convertToEpochMillis(String dateString, String dateFormat,
      TimeZone timeZone) {
    String localDateFormat = dateFormat;
    try {
      if (StringUtils.isEmpty(dateString)) {
        return Instant.now().toEpochMilli();
      }
      if (StringUtils.isEmpty(dateFormat)) {
        localDateFormat = "MM-dd-yyyy HH:mm:ss";
      }
      // Use a local instead of reassigning the parameter.
      TimeZone effectiveTimeZone =
          (timeZone == null) ? TimeZone.getDefault() : timeZone;
      // SimpleDateFormat is not thread-safe, but this instance is local.
      SimpleDateFormat sdf = new SimpleDateFormat(localDateFormat);
      sdf.setTimeZone(effectiveTimeZone); // Set server's timezone
      Date date = sdf.parse(dateString);
      return date.getTime(); // Convert to epoch milliseconds
    } catch (ParseException parseException) {
      // Exception passed as last arg (no placeholder) so SLF4J logs the trace.
      log.error("Date parse exception for date: {} in format: {}",
          dateString, localDateFormat, parseException);
      return Instant.now().toEpochMilli();
    } catch (Exception exception) {
      log.error("Unexpected error while parsing date: {} in format: {}",
          dateString, localDateFormat, exception);
      return Instant.now().toEpochMilli();
    }
  }

  /**
   * Checks that a search prefix reaches at least the bucket level
   * ("/volume/bucket" or deeper).
   *
   * @param startPrefix prefix to validate (leading '/' optional).
   * @return true if the prefix is at bucket level or deeper.
   */
  public static boolean validateStartPrefix(String startPrefix) {
    // Ensure startPrefix starts with '/' for non-empty values
    startPrefix = startPrefix.startsWith("/") ? startPrefix : "/" + startPrefix;

    // Split the path to ensure it's at least at the bucket level
    // (volume/bucket).
    String[] pathComponents = startPrefix.split("/");
    if (pathComponents.length < 3 || pathComponents[2].isEmpty()) {
      return false; // Invalid if not at bucket level or deeper
    }
    return true;
  }

  /**
   * Retrieves keys from the specified table based on pagination and prefix
   * filtering. This method handles different scenarios based on the presence
   * of {@code startPrefix} and {@code prevKey}, enabling efficient key
   * retrieval from the table.
   *
   * The method handles the following cases:
   *
   * 1. {@code prevKey} provided, {@code startPrefix} empty:
   *    - Seeks to {@code prevKey}, skips it, and returns subsequent records
   *      up to the limit.
   * 2. {@code prevKey} empty, {@code startPrefix} empty:
   *    - Iterates from the beginning of the table, retrieving all records up
   *      to the limit.
   * 3. {@code startPrefix} provided, {@code prevKey} empty:
   *    - Seeks to the first key matching {@code startPrefix} and returns all
   *      matching keys up to the limit.
   * 4. {@code startPrefix} provided, {@code prevKey} provided:
   *    - Seeks to {@code prevKey}, skips it, and returns subsequent keys that
   *      match {@code startPrefix}, up to the limit.
   *
   * This method also handles the following {@code limit} scenarios:
   * - If {@code limit == 0} or {@code limit < -1}, no records are returned.
   * - If {@code limit == -1}, all records are returned.
   * - For positive {@code limit}, it retrieves records up to the specified
   *   {@code limit}.
   *
   * @param table The table to retrieve keys from.
   * @param startPrefix The search prefix to match keys against.
   * @param limit The maximum number of keys to retrieve.
   * @param prevKey The key to start after for the next set of records.
   * @return A map of keys and their corresponding values.
   * @throws IOException If there are problems accessing the table.
   */
  public static <T> Map<String, T> extractKeysFromTable(
      Table<String, T> table, String startPrefix, int limit, String prevKey)
      throws IOException {

    Map<String, T> matchedKeys = new LinkedHashMap<>();

    // Null check for the table to prevent NPE during omMetaManager
    // initialization
    if (table == null) {
      log.error("Table object is null. omMetaManager might still be " +
          "initializing.");
      return Collections.emptyMap();
    }

    // If limit = 0, return an empty result set
    if (limit == 0 || limit < -1) {
      return matchedKeys;
    }

    // If limit = -1, set it to Integer.MAX_VALUE to return all records
    int actualLimit = (limit == -1) ? Integer.MAX_VALUE : limit;

    try (TableIterator<String, ? extends Table.KeyValue<String, T>> keyIter =
        table.iterator()) {
      // Scenario 1 & 4: prevKey is provided (whether startPrefix is empty or
      // not)
      if (!prevKey.isEmpty()) {
        keyIter.seek(prevKey);
        if (keyIter.hasNext()) {
          keyIter.next(); // Skip the previous key record
        }
      } else if (!startPrefix.isEmpty()) {
        // Scenario 3: startPrefix is provided but prevKey is empty, so seek
        // to startPrefix
        keyIter.seek(startPrefix);
      }
      // Scenario 2: Both startPrefix and prevKey are empty — no seeking
      // needed; the iterator implicitly starts at the first record.

      // Iterate through the keys while adhering to the limit
      while (keyIter.hasNext() && matchedKeys.size() < actualLimit) {
        Table.KeyValue<String, T> entry = keyIter.next();
        String dbKey = entry.getKey();

        // Scenario 3 & 4: If startPrefix is provided, ensure the key matches
        if (!startPrefix.isEmpty() && !dbKey.startsWith(startPrefix)) {
          break; // Keys are sorted; once the prefix stops matching, stop.
        }
        matchedKeys.put(dbKey, entry.getValue());
      }
    } catch (IOException exception) {
      log.error("Error retrieving keys from table for path: {}", startPrefix,
          exception);
      throw exception;
    }
    return matchedKeys;
  }

  /**
   * Finds all subdirectories under a parent directory in an FSO bucket. It
   * builds a list of paths for these subdirectories. These sub-directories
   * are then used to search for open files in the openFileTable.
   *
   * How it works:
   * - Starts from a parent directory identified by parentId.
   * - Looks through all child directories of this parent.
   * - For each child, it creates a path that starts with
   *   volumeID/bucketID/parentId, following our openFileTable format.
   * - Adds these paths to a list and explores each child further for more
   *   subdirectories.
   *
   * @param parentId The ID of the parent directory from which to start
   *        gathering subdirectories.
   * @param subPaths The list to which the paths of subdirectories will be
   *        added.
   * @param volumeID The ID of the volume containing the parent directory.
   * @param bucketID The ID of the bucket containing the parent directory.
   * @param reconNamespaceSummaryManager The manager used to retrieve
   *        NSSummary objects.
   * @throws IOException If an I/O error occurs while fetching NSSummary
   *         objects.
   */
  public static void gatherSubPaths(long parentId, List<String> subPaths,
      long volumeID, long bucketID,
      ReconNamespaceSummaryManager reconNamespaceSummaryManager)
      throws IOException {
    // Fetch the NSSummary object for parentId
    NSSummary parentSummary =
        reconNamespaceSummaryManager.getNSSummary(parentId);
    if (parentSummary == null) {
      return;
    }

    Set<Long> childDirIds = parentSummary.getChildDir();
    for (Long childId : childDirIds) {
      // Fetch the NSSummary for each child directory
      NSSummary childSummary =
          reconNamespaceSummaryManager.getNSSummary(childId);
      if (childSummary != null) {
        String subPath =
            ReconUtils.constructObjectPathWithPrefix(volumeID, bucketID,
                childId);
        // Add to subPaths
        subPaths.add(subPath);
        // Recurse into this child directory
        gatherSubPaths(childId, subPaths, volumeID, bucketID,
            reconNamespaceSummaryManager);
      }
    }
  }

  /**
   * Validates volume or bucket names according to specific rules.
   *
   * @param resName The name to validate (volume or bucket).
   * @return A Response object if validation fails, or null if the name is
   *         valid. (In practice invalid names raise
   *         IllegalArgumentException; the return type is kept for interface
   *         compatibility.)
   * @throws IllegalArgumentException if the name is invalid.
   */
  public static Response validateNames(String resName)
      throws IllegalArgumentException {
    if (resName.length() < OzoneConsts.OZONE_MIN_BUCKET_NAME_LENGTH ||
        resName.length() > OzoneConsts.OZONE_MAX_BUCKET_NAME_LENGTH) {
      throw new IllegalArgumentException(
          "Bucket or Volume name length should be between " +
              OzoneConsts.OZONE_MIN_BUCKET_NAME_LENGTH + " and " +
              OzoneConsts.OZONE_MAX_BUCKET_NAME_LENGTH);
    }

    if (resName.charAt(0) == '.' || resName.charAt(0) == '-' ||
        resName.charAt(resName.length() - 1) == '.' ||
        resName.charAt(resName.length() - 1) == '-') {
      throw new IllegalArgumentException(
          "Bucket or Volume name cannot start or end with " +
              "hyphen or period");
    }

    // Regex to check for lowercase letters, numbers, hyphens, underscores,
    // and periods only.
    if (!resName.matches("^[a-z0-9._-]+$")) {
      throw new IllegalArgumentException(
          "Bucket or Volume name can only contain lowercase " +
              "letters, numbers, hyphens, underscores, and periods");
    }

    // If all checks pass, the name is valid
    return null;
  }

  /**
   * Constructs an object path with the given IDs.
   *
   * @param ids The IDs to construct the object path with.
   * @return The constructed object path, e.g. "/volId/bucketId/dirId".
   */
  public static String constructObjectPathWithPrefix(long... ids) {
    StringBuilder pathBuilder = new StringBuilder();
    for (long id : ids) {
      pathBuilder.append(OM_KEY_PREFIX).append(id);
    }
    return pathBuilder.toString();
  }
}
googleapis/google-cloud-java
35,944
java-securitycenter/proto-google-cloud-securitycenter-v1beta1/src/main/java/com/google/cloud/securitycenter/v1beta1/UpdateOrganizationSettingsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/v1beta1/securitycenter_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.v1beta1; /** * * * <pre> * Request message for updating an organization's settings. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest} */ public final class UpdateOrganizationSettingsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest) UpdateOrganizationSettingsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateOrganizationSettingsRequest.newBuilder() to construct. 
private UpdateOrganizationSettingsRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateOrganizationSettingsRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateOrganizationSettingsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1beta1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1beta1_UpdateOrganizationSettingsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1beta1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1beta1_UpdateOrganizationSettingsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest.class, com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest.Builder .class); } private int bitField0_; public static final int ORGANIZATION_SETTINGS_FIELD_NUMBER = 1; private com.google.cloud.securitycenter.v1beta1.OrganizationSettings organizationSettings_; /** * * * <pre> * Required. The organization settings resource to update. * </pre> * * <code> * .google.cloud.securitycenter.v1beta1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the organizationSettings field is set. */ @java.lang.Override public boolean hasOrganizationSettings() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The organization settings resource to update. * </pre> * * <code> * .google.cloud.securitycenter.v1beta1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The organizationSettings. 
*/ @java.lang.Override public com.google.cloud.securitycenter.v1beta1.OrganizationSettings getOrganizationSettings() { return organizationSettings_ == null ? com.google.cloud.securitycenter.v1beta1.OrganizationSettings.getDefaultInstance() : organizationSettings_; } /** * * * <pre> * Required. The organization settings resource to update. * </pre> * * <code> * .google.cloud.securitycenter.v1beta1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.securitycenter.v1beta1.OrganizationSettingsOrBuilder getOrganizationSettingsOrBuilder() { return organizationSettings_ == null ? com.google.cloud.securitycenter.v1beta1.OrganizationSettings.getDefaultInstance() : organizationSettings_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * The FieldMask to use when updating the settings resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The FieldMask to use when updating the settings resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * The FieldMask to use when updating the settings resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getOrganizationSettings()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getOrganizationSettings()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest)) { return super.equals(obj); } com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest other = (com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest) obj; if (hasOrganizationSettings() != other.hasOrganizationSettings()) return false; if (hasOrganizationSettings()) { if (!getOrganizationSettings().equals(other.getOrganizationSettings())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int 
hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasOrganizationSettings()) { hash = (37 * hash) + ORGANIZATION_SETTINGS_FIELD_NUMBER; hash = (53 * hash) + getOrganizationSettings().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( 
com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for updating an organization's settings. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest) com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1beta1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1beta1_UpdateOrganizationSettingsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1beta1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1beta1_UpdateOrganizationSettingsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest.class, com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest.Builder .class); } // Construct using // com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getOrganizationSettingsFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; organizationSettings_ = null; if (organizationSettingsBuilder_ != null) { organizationSettingsBuilder_.dispose(); organizationSettingsBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securitycenter.v1beta1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1beta1_UpdateOrganizationSettingsRequest_descriptor; } @java.lang.Override public com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest getDefaultInstanceForType() { return com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest build() { com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest buildPartial() { com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest result = new com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.organizationSettings_ 
= organizationSettingsBuilder_ == null ? organizationSettings_ : organizationSettingsBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest) { return mergeFrom( (com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest other) { if (other == com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest .getDefaultInstance()) return this; if (other.hasOrganizationSettings()) { mergeOrganizationSettings(other.getOrganizationSettings()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } 
this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage( getOrganizationSettingsFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.securitycenter.v1beta1.OrganizationSettings organizationSettings_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.securitycenter.v1beta1.OrganizationSettings, com.google.cloud.securitycenter.v1beta1.OrganizationSettings.Builder, com.google.cloud.securitycenter.v1beta1.OrganizationSettingsOrBuilder> organizationSettingsBuilder_; /** * * * <pre> * Required. The organization settings resource to update. * </pre> * * <code> * .google.cloud.securitycenter.v1beta1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the organizationSettings field is set. */ public boolean hasOrganizationSettings() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The organization settings resource to update. 
* </pre> * * <code> * .google.cloud.securitycenter.v1beta1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The organizationSettings. */ public com.google.cloud.securitycenter.v1beta1.OrganizationSettings getOrganizationSettings() { if (organizationSettingsBuilder_ == null) { return organizationSettings_ == null ? com.google.cloud.securitycenter.v1beta1.OrganizationSettings.getDefaultInstance() : organizationSettings_; } else { return organizationSettingsBuilder_.getMessage(); } } /** * * * <pre> * Required. The organization settings resource to update. * </pre> * * <code> * .google.cloud.securitycenter.v1beta1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setOrganizationSettings( com.google.cloud.securitycenter.v1beta1.OrganizationSettings value) { if (organizationSettingsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } organizationSettings_ = value; } else { organizationSettingsBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The organization settings resource to update. * </pre> * * <code> * .google.cloud.securitycenter.v1beta1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setOrganizationSettings( com.google.cloud.securitycenter.v1beta1.OrganizationSettings.Builder builderForValue) { if (organizationSettingsBuilder_ == null) { organizationSettings_ = builderForValue.build(); } else { organizationSettingsBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The organization settings resource to update. 
* </pre> * * <code> * .google.cloud.securitycenter.v1beta1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeOrganizationSettings( com.google.cloud.securitycenter.v1beta1.OrganizationSettings value) { if (organizationSettingsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && organizationSettings_ != null && organizationSettings_ != com.google.cloud.securitycenter.v1beta1.OrganizationSettings .getDefaultInstance()) { getOrganizationSettingsBuilder().mergeFrom(value); } else { organizationSettings_ = value; } } else { organizationSettingsBuilder_.mergeFrom(value); } if (organizationSettings_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The organization settings resource to update. * </pre> * * <code> * .google.cloud.securitycenter.v1beta1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearOrganizationSettings() { bitField0_ = (bitField0_ & ~0x00000001); organizationSettings_ = null; if (organizationSettingsBuilder_ != null) { organizationSettingsBuilder_.dispose(); organizationSettingsBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The organization settings resource to update. * </pre> * * <code> * .google.cloud.securitycenter.v1beta1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.securitycenter.v1beta1.OrganizationSettings.Builder getOrganizationSettingsBuilder() { bitField0_ |= 0x00000001; onChanged(); return getOrganizationSettingsFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The organization settings resource to update. 
* </pre> * * <code> * .google.cloud.securitycenter.v1beta1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.securitycenter.v1beta1.OrganizationSettingsOrBuilder getOrganizationSettingsOrBuilder() { if (organizationSettingsBuilder_ != null) { return organizationSettingsBuilder_.getMessageOrBuilder(); } else { return organizationSettings_ == null ? com.google.cloud.securitycenter.v1beta1.OrganizationSettings.getDefaultInstance() : organizationSettings_; } } /** * * * <pre> * Required. The organization settings resource to update. * </pre> * * <code> * .google.cloud.securitycenter.v1beta1.OrganizationSettings organization_settings = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.securitycenter.v1beta1.OrganizationSettings, com.google.cloud.securitycenter.v1beta1.OrganizationSettings.Builder, com.google.cloud.securitycenter.v1beta1.OrganizationSettingsOrBuilder> getOrganizationSettingsFieldBuilder() { if (organizationSettingsBuilder_ == null) { organizationSettingsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.securitycenter.v1beta1.OrganizationSettings, com.google.cloud.securitycenter.v1beta1.OrganizationSettings.Builder, com.google.cloud.securitycenter.v1beta1.OrganizationSettingsOrBuilder>( getOrganizationSettings(), getParentForChildren(), isClean()); organizationSettings_ = null; } return organizationSettingsBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * The FieldMask to use when updating the settings resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. 
*/ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The FieldMask to use when updating the settings resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * The FieldMask to use when updating the settings resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The FieldMask to use when updating the settings resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The FieldMask to use when updating the settings resource. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The FieldMask to use when updating the settings resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The FieldMask to use when updating the settings resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * The FieldMask to use when updating the settings resource. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * The FieldMask to use when updating the settings resource. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest) private static final com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest(); } public static com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateOrganizationSettingsRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateOrganizationSettingsRequest>() { @java.lang.Override public UpdateOrganizationSettingsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); 
} catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateOrganizationSettingsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateOrganizationSettingsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securitycenter.v1beta1.UpdateOrganizationSettingsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,097
java-shopping-merchant-accounts/grpc-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/ProgramsServiceGrpc.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
package com.google.shopping.merchant.accounts.v1;

import static io.grpc.MethodDescriptor.generateFullMethodName;

// NOTE(review): This file is generated by the gRPC proto compiler (see the
// @Generated annotation below). Do not edit by hand — regenerate from
// google/shopping/merchant/accounts/v1/programs.proto instead.

/**
 *
 *
 * <pre>
 * Service for program management.
 * Programs provide a mechanism for adding functionality to merchant accounts. A
 * typical example of this is the [Free product
 * listings](https://support.google.com/merchants/answer/13889434)
 * program, which enables products from a merchant's store to be shown across
 * Google for free.
 * This service exposes methods to retrieve a business's
 * participation in all available programs, in addition to methods for
 * explicitly enabling or disabling participation in each program.
 * </pre>
 */
@javax.annotation.Generated(
    value = "by gRPC proto compiler",
    comments = "Source: google/shopping/merchant/accounts/v1/programs.proto")
@io.grpc.stub.annotations.GrpcGenerated
public final class ProgramsServiceGrpc {

  // Non-instantiable: this class is a static holder for method descriptors and
  // stub factories only.
  private ProgramsServiceGrpc() {}

  public static final java.lang.String SERVICE_NAME =
      "google.shopping.merchant.accounts.v1.ProgramsService";

  // Static method descriptors that strictly reflect the proto.
  private static volatile io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1.GetProgramRequest,
          com.google.shopping.merchant.accounts.v1.Program>
      getGetProgramMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "GetProgram",
      requestType = com.google.shopping.merchant.accounts.v1.GetProgramRequest.class,
      responseType = com.google.shopping.merchant.accounts.v1.Program.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1.GetProgramRequest,
          com.google.shopping.merchant.accounts.v1.Program>
      getGetProgramMethod() {
    // Lazily builds the descriptor via double-checked locking on the class
    // object (the field above is volatile, so the published value is safe).
    io.grpc.MethodDescriptor<
            com.google.shopping.merchant.accounts.v1.GetProgramRequest,
            com.google.shopping.merchant.accounts.v1.Program>
        getGetProgramMethod;
    if ((getGetProgramMethod = ProgramsServiceGrpc.getGetProgramMethod) == null) {
      synchronized (ProgramsServiceGrpc.class) {
        if ((getGetProgramMethod = ProgramsServiceGrpc.getGetProgramMethod) == null) {
          ProgramsServiceGrpc.getGetProgramMethod =
              getGetProgramMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.shopping.merchant.accounts.v1.GetProgramRequest,
                          com.google.shopping.merchant.accounts.v1.Program>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetProgram"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1.GetProgramRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1.Program
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new ProgramsServiceMethodDescriptorSupplier("GetProgram"))
                      .build();
        }
      }
    }
    return getGetProgramMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1.ListProgramsRequest,
          com.google.shopping.merchant.accounts.v1.ListProgramsResponse>
      getListProgramsMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "ListPrograms",
      requestType = com.google.shopping.merchant.accounts.v1.ListProgramsRequest.class,
      responseType = com.google.shopping.merchant.accounts.v1.ListProgramsResponse.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1.ListProgramsRequest,
          com.google.shopping.merchant.accounts.v1.ListProgramsResponse>
      getListProgramsMethod() {
    // Lazily builds the descriptor via double-checked locking (see
    // getGetProgramMethod for the pattern).
    io.grpc.MethodDescriptor<
            com.google.shopping.merchant.accounts.v1.ListProgramsRequest,
            com.google.shopping.merchant.accounts.v1.ListProgramsResponse>
        getListProgramsMethod;
    if ((getListProgramsMethod = ProgramsServiceGrpc.getListProgramsMethod) == null) {
      synchronized (ProgramsServiceGrpc.class) {
        if ((getListProgramsMethod = ProgramsServiceGrpc.getListProgramsMethod) == null) {
          ProgramsServiceGrpc.getListProgramsMethod =
              getListProgramsMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.shopping.merchant.accounts.v1.ListProgramsRequest,
                          com.google.shopping.merchant.accounts.v1.ListProgramsResponse>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListPrograms"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1.ListProgramsRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1.ListProgramsResponse
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new ProgramsServiceMethodDescriptorSupplier("ListPrograms"))
                      .build();
        }
      }
    }
    return getListProgramsMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1.EnableProgramRequest,
          com.google.shopping.merchant.accounts.v1.Program>
      getEnableProgramMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "EnableProgram",
      requestType = com.google.shopping.merchant.accounts.v1.EnableProgramRequest.class,
      responseType = com.google.shopping.merchant.accounts.v1.Program.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1.EnableProgramRequest,
          com.google.shopping.merchant.accounts.v1.Program>
      getEnableProgramMethod() {
    // Lazily builds the descriptor via double-checked locking (see
    // getGetProgramMethod for the pattern).
    io.grpc.MethodDescriptor<
            com.google.shopping.merchant.accounts.v1.EnableProgramRequest,
            com.google.shopping.merchant.accounts.v1.Program>
        getEnableProgramMethod;
    if ((getEnableProgramMethod = ProgramsServiceGrpc.getEnableProgramMethod) == null) {
      synchronized (ProgramsServiceGrpc.class) {
        if ((getEnableProgramMethod = ProgramsServiceGrpc.getEnableProgramMethod) == null) {
          ProgramsServiceGrpc.getEnableProgramMethod =
              getEnableProgramMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.shopping.merchant.accounts.v1.EnableProgramRequest,
                          com.google.shopping.merchant.accounts.v1.Program>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "EnableProgram"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1.EnableProgramRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1.Program
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new ProgramsServiceMethodDescriptorSupplier("EnableProgram"))
                      .build();
        }
      }
    }
    return getEnableProgramMethod;
  }

  private static volatile io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1.DisableProgramRequest,
          com.google.shopping.merchant.accounts.v1.Program>
      getDisableProgramMethod;

  @io.grpc.stub.annotations.RpcMethod(
      fullMethodName = SERVICE_NAME + '/' + "DisableProgram",
      requestType = com.google.shopping.merchant.accounts.v1.DisableProgramRequest.class,
      responseType = com.google.shopping.merchant.accounts.v1.Program.class,
      methodType = io.grpc.MethodDescriptor.MethodType.UNARY)
  public static io.grpc.MethodDescriptor<
          com.google.shopping.merchant.accounts.v1.DisableProgramRequest,
          com.google.shopping.merchant.accounts.v1.Program>
      getDisableProgramMethod() {
    // Lazily builds the descriptor via double-checked locking (see
    // getGetProgramMethod for the pattern).
    io.grpc.MethodDescriptor<
            com.google.shopping.merchant.accounts.v1.DisableProgramRequest,
            com.google.shopping.merchant.accounts.v1.Program>
        getDisableProgramMethod;
    if ((getDisableProgramMethod = ProgramsServiceGrpc.getDisableProgramMethod) == null) {
      synchronized (ProgramsServiceGrpc.class) {
        if ((getDisableProgramMethod = ProgramsServiceGrpc.getDisableProgramMethod) == null) {
          ProgramsServiceGrpc.getDisableProgramMethod =
              getDisableProgramMethod =
                  io.grpc.MethodDescriptor
                      .<com.google.shopping.merchant.accounts.v1.DisableProgramRequest,
                          com.google.shopping.merchant.accounts.v1.Program>
                          newBuilder()
                      .setType(io.grpc.MethodDescriptor.MethodType.UNARY)
                      .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DisableProgram"))
                      .setSampledToLocalTracing(true)
                      .setRequestMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1.DisableProgramRequest
                                  .getDefaultInstance()))
                      .setResponseMarshaller(
                          io.grpc.protobuf.ProtoUtils.marshaller(
                              com.google.shopping.merchant.accounts.v1.Program
                                  .getDefaultInstance()))
                      .setSchemaDescriptor(
                          new ProgramsServiceMethodDescriptorSupplier("DisableProgram"))
                      .build();
        }
      }
    }
    return getDisableProgramMethod;
  }

  /** Creates a new async stub that supports all call types for the service */
  public static ProgramsServiceStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceStub>() {
          @java.lang.Override
          public ProgramsServiceStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new ProgramsServiceStub(channel, callOptions);
          }
        };
    return ProgramsServiceStub.newStub(factory, channel);
  }

  /** Creates a new blocking-style stub that supports all types of calls on the service */
  public static ProgramsServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceBlockingV2Stub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceBlockingV2Stub>() {
          @java.lang.Override
          public ProgramsServiceBlockingV2Stub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new ProgramsServiceBlockingV2Stub(channel, callOptions);
          }
        };
    return ProgramsServiceBlockingV2Stub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the
   * service
   */
  public static ProgramsServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceBlockingStub>() {
          @java.lang.Override
          public ProgramsServiceBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new ProgramsServiceBlockingStub(channel, callOptions);
          }
        };
    return ProgramsServiceBlockingStub.newStub(factory, channel);
  }

  /** Creates a new ListenableFuture-style stub that supports unary calls on the service */
  public static ProgramsServiceFutureStub newFutureStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<ProgramsServiceFutureStub>() {
          @java.lang.Override
          public ProgramsServiceFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new ProgramsServiceFutureStub(channel, callOptions);
          }
        };
    return ProgramsServiceFutureStub.newStub(factory, channel);
  }

  /**
   *
   *
   * <pre>
   * Service for program management.
   * Programs provide a mechanism for adding functionality to merchant accounts. A
   * typical example of this is the [Free product
   * listings](https://support.google.com/merchants/answer/13889434)
   * program, which enables products from a merchant's store to be shown across
   * Google for free.
   * This service exposes methods to retrieve a business's
   * participation in all available programs, in addition to methods for
   * explicitly enabling or disabling participation in each program.
   * </pre>
   */
  public interface AsyncService {

    /**
     *
     *
     * <pre>
     * Retrieves the specified program for the account.
     * </pre>
     */
    default void getProgram(
        com.google.shopping.merchant.accounts.v1.GetProgramRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Program>
            responseObserver) {
      // Default: reports UNIMPLEMENTED unless the server overrides this method.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetProgramMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Retrieves all programs for the account.
     * </pre>
     */
    default void listPrograms(
        com.google.shopping.merchant.accounts.v1.ListProgramsRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.ListProgramsResponse>
            responseObserver) {
      // Default: reports UNIMPLEMENTED unless the server overrides this method.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getListProgramsMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Enable participation in the specified program for the account.
     * </pre>
     */
    default void enableProgram(
        com.google.shopping.merchant.accounts.v1.EnableProgramRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Program>
            responseObserver) {
      // Default: reports UNIMPLEMENTED unless the server overrides this method.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getEnableProgramMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Disable participation in the specified program for the account.
     * </pre>
     */
    default void disableProgram(
        com.google.shopping.merchant.accounts.v1.DisableProgramRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Program>
            responseObserver) {
      // Default: reports UNIMPLEMENTED unless the server overrides this method.
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDisableProgramMethod(), responseObserver);
    }
  }

  /**
   * Base class for the server implementation of the service ProgramsService.
   *
   * <pre>
   * Service for program management.
   * Programs provide a mechanism for adding functionality to merchant accounts. A
   * typical example of this is the [Free product
   * listings](https://support.google.com/merchants/answer/13889434)
   * program, which enables products from a merchant's store to be shown across
   * Google for free.
   * This service exposes methods to retrieve a business's
   * participation in all available programs, in addition to methods for
   * explicitly enabling or disabling participation in each program.
   * </pre>
   */
  public abstract static class ProgramsServiceImplBase
      implements io.grpc.BindableService, AsyncService {

    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return ProgramsServiceGrpc.bindService(this);
    }
  }

  /**
   * A stub to allow clients to do asynchronous rpc calls to service ProgramsService.
   *
   * <pre>
   * Service for program management.
   * Programs provide a mechanism for adding functionality to merchant accounts. A
   * typical example of this is the [Free product
   * listings](https://support.google.com/merchants/answer/13889434)
   * program, which enables products from a merchant's store to be shown across
   * Google for free.
   * This service exposes methods to retrieve a business's
   * participation in all available programs, in addition to methods for
   * explicitly enabling or disabling participation in each program.
   * </pre>
   */
  public static final class ProgramsServiceStub
      extends io.grpc.stub.AbstractAsyncStub<ProgramsServiceStub> {
    private ProgramsServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected ProgramsServiceStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ProgramsServiceStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the specified program for the account.
     * </pre>
     */
    public void getProgram(
        com.google.shopping.merchant.accounts.v1.GetProgramRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Program>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetProgramMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Retrieves all programs for the account.
     * </pre>
     */
    public void listPrograms(
        com.google.shopping.merchant.accounts.v1.ListProgramsRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.ListProgramsResponse>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getListProgramsMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Enable participation in the specified program for the account.
     * </pre>
     */
    public void enableProgram(
        com.google.shopping.merchant.accounts.v1.EnableProgramRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Program>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getEnableProgramMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Disable participation in the specified program for the account.
     * </pre>
     */
    public void disableProgram(
        com.google.shopping.merchant.accounts.v1.DisableProgramRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Program>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDisableProgramMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }

  /**
   * A stub to allow clients to do synchronous rpc calls to service ProgramsService.
   *
   * <pre>
   * Service for program management.
   * Programs provide a mechanism for adding functionality to merchant accounts. A
   * typical example of this is the [Free product
   * listings](https://support.google.com/merchants/answer/13889434)
   * program, which enables products from a merchant's store to be shown across
   * Google for free.
   * This service exposes methods to retrieve a business's
   * participation in all available programs, in addition to methods for
   * explicitly enabling or disabling participation in each program.
   * </pre>
   */
  public static final class ProgramsServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<ProgramsServiceBlockingV2Stub> {
    private ProgramsServiceBlockingV2Stub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected ProgramsServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ProgramsServiceBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the specified program for the account.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.Program getProgram(
        com.google.shopping.merchant.accounts.v1.GetProgramRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetProgramMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Retrieves all programs for the account.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.ListProgramsResponse listPrograms(
        com.google.shopping.merchant.accounts.v1.ListProgramsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListProgramsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Enable participation in the specified program for the account.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.Program enableProgram(
        com.google.shopping.merchant.accounts.v1.EnableProgramRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getEnableProgramMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Disable participation in the specified program for the account.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.Program disableProgram(
        com.google.shopping.merchant.accounts.v1.DisableProgramRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDisableProgramMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do limited synchronous rpc calls to service ProgramsService.
   *
   * <pre>
   * Service for program management.
   * Programs provide a mechanism for adding functionality to merchant accounts. A
   * typical example of this is the [Free product
   * listings](https://support.google.com/merchants/answer/13889434)
   * program, which enables products from a merchant's store to be shown across
   * Google for free.
   * This service exposes methods to retrieve a business's
   * participation in all available programs, in addition to methods for
   * explicitly enabling or disabling participation in each program.
   * </pre>
   */
  public static final class ProgramsServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<ProgramsServiceBlockingStub> {
    private ProgramsServiceBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected ProgramsServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ProgramsServiceBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the specified program for the account.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.Program getProgram(
        com.google.shopping.merchant.accounts.v1.GetProgramRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetProgramMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Retrieves all programs for the account.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.ListProgramsResponse listPrograms(
        com.google.shopping.merchant.accounts.v1.ListProgramsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getListProgramsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Enable participation in the specified program for the account.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.Program enableProgram(
        com.google.shopping.merchant.accounts.v1.EnableProgramRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getEnableProgramMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Disable participation in the specified program for the account.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.Program disableProgram(
        com.google.shopping.merchant.accounts.v1.DisableProgramRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDisableProgramMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service ProgramsService.
   *
   * <pre>
   * Service for program management.
   * Programs provide a mechanism for adding functionality to merchant accounts. A
   * typical example of this is the [Free product
   * listings](https://support.google.com/merchants/answer/13889434)
   * program, which enables products from a merchant's store to be shown across
   * Google for free.
   * This service exposes methods to retrieve a business's
   * participation in all available programs, in addition to methods for
   * explicitly enabling or disabling participation in each program.
   * </pre>
   */
  public static final class ProgramsServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<ProgramsServiceFutureStub> {
    private ProgramsServiceFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected ProgramsServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new ProgramsServiceFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Retrieves the specified program for the account.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1.Program>
        getProgram(com.google.shopping.merchant.accounts.v1.GetProgramRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetProgramMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Retrieves all programs for the account.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1.ListProgramsResponse>
        listPrograms(com.google.shopping.merchant.accounts.v1.ListProgramsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getListProgramsMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Enable participation in the specified program for the account.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1.Program>
        enableProgram(com.google.shopping.merchant.accounts.v1.EnableProgramRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getEnableProgramMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Disable participation in the specified program for the account.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1.Program>
        disableProgram(com.google.shopping.merchant.accounts.v1.DisableProgramRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDisableProgramMethod(), getCallOptions()), request);
    }
  }

  // Integer ids used by MethodHandlers to dispatch a call to the right
  // AsyncService method.
  private static final int METHODID_GET_PROGRAM = 0;
  private static final int METHODID_LIST_PROGRAMS = 1;
  private static final int METHODID_ENABLE_PROGRAM = 2;
  private static final int METHODID_DISABLE_PROGRAM = 3;

  // Single handler class shared by all four RPCs; `methodId` selects which
  // AsyncService method a given instance forwards to.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_GET_PROGRAM:
          serviceImpl.getProgram(
              (com.google.shopping.merchant.accounts.v1.GetProgramRequest) request,
              (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Program>)
                  responseObserver);
          break;
        case METHODID_LIST_PROGRAMS:
          serviceImpl.listPrograms(
              (com.google.shopping.merchant.accounts.v1.ListProgramsRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.shopping.merchant.accounts.v1.ListProgramsResponse>)
                  responseObserver);
          break;
        case METHODID_ENABLE_PROGRAM:
          serviceImpl.enableProgram(
              (com.google.shopping.merchant.accounts.v1.EnableProgramRequest) request,
              (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Program>)
                  responseObserver);
          break;
        case METHODID_DISABLE_PROGRAM:
          serviceImpl.disableProgram(
              (com.google.shopping.merchant.accounts.v1.DisableProgramRequest) request,
              (io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.Program>)
                  responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // No streaming methods in this service, so this overload is never valid.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }

  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getGetProgramMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1.GetProgramRequest,
                    com.google.shopping.merchant.accounts.v1.Program>(
                    service, METHODID_GET_PROGRAM)))
        .addMethod(
            getListProgramsMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1.ListProgramsRequest,
                    com.google.shopping.merchant.accounts.v1.ListProgramsResponse>(
                    service, METHODID_LIST_PROGRAMS)))
        .addMethod(
            getEnableProgramMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1.EnableProgramRequest,
                    com.google.shopping.merchant.accounts.v1.Program>(
                    service, METHODID_ENABLE_PROGRAM)))
        .addMethod(
            getDisableProgramMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1.DisableProgramRequest,
                    com.google.shopping.merchant.accounts.v1.Program>(
                    service, METHODID_DISABLE_PROGRAM)))
        .build();
  }

  // Supplies proto file/service descriptors so reflection and debugging tools
  // can inspect the service schema.
  private abstract static class ProgramsServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    ProgramsServiceBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.shopping.merchant.accounts.v1.ProgramsProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("ProgramsService");
    }
  }

  private static final class ProgramsServiceFileDescriptorSupplier
      extends ProgramsServiceBaseDescriptorSupplier {
    ProgramsServiceFileDescriptorSupplier() {}
  }

  private static final class ProgramsServiceMethodDescriptorSupplier
      extends ProgramsServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;

    ProgramsServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }

  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    // Lazily builds the aggregate service descriptor via double-checked
    // locking on the class object (field above is volatile).
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (ProgramsServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new ProgramsServiceFileDescriptorSupplier())
                      .addMethod(getGetProgramMethod())
                      .addMethod(getListProgramsMethod())
                      .addMethod(getEnableProgramMethod())
                      .addMethod(getDisableProgramMethod())
                      .build();
        }
      }
    }
    return result;
  }
}
googleapis/google-cloud-java
35,899
java-aiplatform/proto-google-cloud-aiplatform-v1beta1/src/main/java/com/google/cloud/aiplatform/v1beta1/PairwiseMetricResult.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1beta1/evaluation_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1beta1; /** * * * <pre> * Spec for pairwise metric result. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.PairwiseMetricResult} */ public final class PairwiseMetricResult extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1beta1.PairwiseMetricResult) PairwiseMetricResultOrBuilder { private static final long serialVersionUID = 0L; // Use PairwiseMetricResult.newBuilder() to construct. 
private PairwiseMetricResult(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private PairwiseMetricResult() { pairwiseChoice_ = 0; explanation_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new PairwiseMetricResult(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_PairwiseMetricResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_PairwiseMetricResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult.class, com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult.Builder.class); } private int bitField0_; public static final int PAIRWISE_CHOICE_FIELD_NUMBER = 1; private int pairwiseChoice_ = 0; /** * * * <pre> * Output only. Pairwise metric choice. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseChoice pairwise_choice = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for pairwiseChoice. */ @java.lang.Override public int getPairwiseChoiceValue() { return pairwiseChoice_; } /** * * * <pre> * Output only. Pairwise metric choice. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseChoice pairwise_choice = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The pairwiseChoice. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PairwiseChoice getPairwiseChoice() { com.google.cloud.aiplatform.v1beta1.PairwiseChoice result = com.google.cloud.aiplatform.v1beta1.PairwiseChoice.forNumber(pairwiseChoice_); return result == null ? 
com.google.cloud.aiplatform.v1beta1.PairwiseChoice.UNRECOGNIZED : result; } public static final int EXPLANATION_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object explanation_ = ""; /** * * * <pre> * Output only. Explanation for pairwise metric score. * </pre> * * <code>string explanation = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The explanation. */ @java.lang.Override public java.lang.String getExplanation() { java.lang.Object ref = explanation_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); explanation_ = s; return s; } } /** * * * <pre> * Output only. Explanation for pairwise metric score. * </pre> * * <code>string explanation = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for explanation. */ @java.lang.Override public com.google.protobuf.ByteString getExplanationBytes() { java.lang.Object ref = explanation_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); explanation_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int CUSTOM_OUTPUT_FIELD_NUMBER = 3; private com.google.cloud.aiplatform.v1beta1.CustomOutput customOutput_; /** * * * <pre> * Output only. Spec for custom output. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.CustomOutput custom_output = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the customOutput field is set. */ @java.lang.Override public boolean hasCustomOutput() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Output only. Spec for custom output. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.CustomOutput custom_output = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The customOutput. 
*/ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CustomOutput getCustomOutput() { return customOutput_ == null ? com.google.cloud.aiplatform.v1beta1.CustomOutput.getDefaultInstance() : customOutput_; } /** * * * <pre> * Output only. Spec for custom output. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.CustomOutput custom_output = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.CustomOutputOrBuilder getCustomOutputOrBuilder() { return customOutput_ == null ? com.google.cloud.aiplatform.v1beta1.CustomOutput.getDefaultInstance() : customOutput_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (pairwiseChoice_ != com.google.cloud.aiplatform.v1beta1.PairwiseChoice.PAIRWISE_CHOICE_UNSPECIFIED .getNumber()) { output.writeEnum(1, pairwiseChoice_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(explanation_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, explanation_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getCustomOutput()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (pairwiseChoice_ != com.google.cloud.aiplatform.v1beta1.PairwiseChoice.PAIRWISE_CHOICE_UNSPECIFIED .getNumber()) { size += com.google.protobuf.CodedOutputStream.computeEnumSize(1, pairwiseChoice_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(explanation_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, explanation_); } if (((bitField0_ & 0x00000001) != 0)) { size += 
com.google.protobuf.CodedOutputStream.computeMessageSize(3, getCustomOutput()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult)) { return super.equals(obj); } com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult other = (com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult) obj; if (pairwiseChoice_ != other.pairwiseChoice_) return false; if (!getExplanation().equals(other.getExplanation())) return false; if (hasCustomOutput() != other.hasCustomOutput()) return false; if (hasCustomOutput()) { if (!getCustomOutput().equals(other.getCustomOutput())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PAIRWISE_CHOICE_FIELD_NUMBER; hash = (53 * hash) + pairwiseChoice_; hash = (37 * hash) + EXPLANATION_FIELD_NUMBER; hash = (53 * hash) + getExplanation().hashCode(); if (hasCustomOutput()) { hash = (37 * hash) + CUSTOM_OUTPUT_FIELD_NUMBER; hash = (53 * hash) + getCustomOutput().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( 
PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Spec for pairwise metric result. 
* </pre> * * Protobuf type {@code google.cloud.aiplatform.v1beta1.PairwiseMetricResult} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1beta1.PairwiseMetricResult) com.google.cloud.aiplatform.v1beta1.PairwiseMetricResultOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_PairwiseMetricResult_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_PairwiseMetricResult_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult.class, com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult.Builder.class); } // Construct using com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCustomOutputFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; pairwiseChoice_ = 0; explanation_ = ""; customOutput_ = null; if (customOutputBuilder_ != null) { customOutputBuilder_.dispose(); customOutputBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1beta1.EvaluationServiceProto .internal_static_google_cloud_aiplatform_v1beta1_PairwiseMetricResult_descriptor; } @java.lang.Override 
public com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult build() { com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult buildPartial() { com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult result = new com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.pairwiseChoice_ = pairwiseChoice_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.explanation_ = explanation_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.customOutput_ = customOutputBuilder_ == null ? 
customOutput_ : customOutputBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult) { return mergeFrom((com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult other) { if (other == com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult.getDefaultInstance()) return this; if (other.pairwiseChoice_ != 0) { setPairwiseChoiceValue(other.getPairwiseChoiceValue()); } if (!other.getExplanation().isEmpty()) { explanation_ = other.explanation_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasCustomOutput()) { mergeCustomOutput(other.getCustomOutput()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 8: { pairwiseChoice_ = input.readEnum(); bitField0_ |= 0x00000001; break; } // case 8 case 18: { explanation_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getCustomOutputFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private int pairwiseChoice_ = 0; /** * * * <pre> * Output only. Pairwise metric choice. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseChoice pairwise_choice = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The enum numeric value on the wire for pairwiseChoice. */ @java.lang.Override public int getPairwiseChoiceValue() { return pairwiseChoice_; } /** * * * <pre> * Output only. Pairwise metric choice. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseChoice pairwise_choice = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The enum numeric value on the wire for pairwiseChoice to set. * @return This builder for chaining. */ public Builder setPairwiseChoiceValue(int value) { pairwiseChoice_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Output only. Pairwise metric choice. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseChoice pairwise_choice = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The pairwiseChoice. */ @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PairwiseChoice getPairwiseChoice() { com.google.cloud.aiplatform.v1beta1.PairwiseChoice result = com.google.cloud.aiplatform.v1beta1.PairwiseChoice.forNumber(pairwiseChoice_); return result == null ? com.google.cloud.aiplatform.v1beta1.PairwiseChoice.UNRECOGNIZED : result; } /** * * * <pre> * Output only. Pairwise metric choice. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseChoice pairwise_choice = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @param value The pairwiseChoice to set. * @return This builder for chaining. */ public Builder setPairwiseChoice(com.google.cloud.aiplatform.v1beta1.PairwiseChoice value) { if (value == null) { throw new NullPointerException(); } bitField0_ |= 0x00000001; pairwiseChoice_ = value.getNumber(); onChanged(); return this; } /** * * * <pre> * Output only. Pairwise metric choice. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.PairwiseChoice pairwise_choice = 1 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return This builder for chaining. */ public Builder clearPairwiseChoice() { bitField0_ = (bitField0_ & ~0x00000001); pairwiseChoice_ = 0; onChanged(); return this; } private java.lang.Object explanation_ = ""; /** * * * <pre> * Output only. Explanation for pairwise metric score. * </pre> * * <code>string explanation = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The explanation. */ public java.lang.String getExplanation() { java.lang.Object ref = explanation_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); explanation_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Output only. 
Explanation for pairwise metric score. * </pre> * * <code>string explanation = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return The bytes for explanation. */ public com.google.protobuf.ByteString getExplanationBytes() { java.lang.Object ref = explanation_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); explanation_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Output only. Explanation for pairwise metric score. * </pre> * * <code>string explanation = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The explanation to set. * @return This builder for chaining. */ public Builder setExplanation(java.lang.String value) { if (value == null) { throw new NullPointerException(); } explanation_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Output only. Explanation for pairwise metric score. * </pre> * * <code>string explanation = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @return This builder for chaining. */ public Builder clearExplanation() { explanation_ = getDefaultInstance().getExplanation(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Output only. Explanation for pairwise metric score. * </pre> * * <code>string explanation = 2 [(.google.api.field_behavior) = OUTPUT_ONLY];</code> * * @param value The bytes for explanation to set. * @return This builder for chaining. 
*/ public Builder setExplanationBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); explanation_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.cloud.aiplatform.v1beta1.CustomOutput customOutput_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.CustomOutput, com.google.cloud.aiplatform.v1beta1.CustomOutput.Builder, com.google.cloud.aiplatform.v1beta1.CustomOutputOrBuilder> customOutputBuilder_; /** * * * <pre> * Output only. Spec for custom output. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.CustomOutput custom_output = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return Whether the customOutput field is set. */ public boolean hasCustomOutput() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * Output only. Spec for custom output. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.CustomOutput custom_output = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> * * @return The customOutput. */ public com.google.cloud.aiplatform.v1beta1.CustomOutput getCustomOutput() { if (customOutputBuilder_ == null) { return customOutput_ == null ? com.google.cloud.aiplatform.v1beta1.CustomOutput.getDefaultInstance() : customOutput_; } else { return customOutputBuilder_.getMessage(); } } /** * * * <pre> * Output only. Spec for custom output. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.CustomOutput custom_output = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setCustomOutput(com.google.cloud.aiplatform.v1beta1.CustomOutput value) { if (customOutputBuilder_ == null) { if (value == null) { throw new NullPointerException(); } customOutput_ = value; } else { customOutputBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Spec for custom output. 
* </pre> * * <code> * .google.cloud.aiplatform.v1beta1.CustomOutput custom_output = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder setCustomOutput( com.google.cloud.aiplatform.v1beta1.CustomOutput.Builder builderForValue) { if (customOutputBuilder_ == null) { customOutput_ = builderForValue.build(); } else { customOutputBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Output only. Spec for custom output. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.CustomOutput custom_output = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder mergeCustomOutput(com.google.cloud.aiplatform.v1beta1.CustomOutput value) { if (customOutputBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && customOutput_ != null && customOutput_ != com.google.cloud.aiplatform.v1beta1.CustomOutput.getDefaultInstance()) { getCustomOutputBuilder().mergeFrom(value); } else { customOutput_ = value; } } else { customOutputBuilder_.mergeFrom(value); } if (customOutput_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * Output only. Spec for custom output. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.CustomOutput custom_output = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public Builder clearCustomOutput() { bitField0_ = (bitField0_ & ~0x00000004); customOutput_ = null; if (customOutputBuilder_ != null) { customOutputBuilder_.dispose(); customOutputBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Output only. Spec for custom output. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.CustomOutput custom_output = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloud.aiplatform.v1beta1.CustomOutput.Builder getCustomOutputBuilder() { bitField0_ |= 0x00000004; onChanged(); return getCustomOutputFieldBuilder().getBuilder(); } /** * * * <pre> * Output only. 
Spec for custom output. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.CustomOutput custom_output = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ public com.google.cloud.aiplatform.v1beta1.CustomOutputOrBuilder getCustomOutputOrBuilder() { if (customOutputBuilder_ != null) { return customOutputBuilder_.getMessageOrBuilder(); } else { return customOutput_ == null ? com.google.cloud.aiplatform.v1beta1.CustomOutput.getDefaultInstance() : customOutput_; } } /** * * * <pre> * Output only. Spec for custom output. * </pre> * * <code> * .google.cloud.aiplatform.v1beta1.CustomOutput custom_output = 3 [(.google.api.field_behavior) = OUTPUT_ONLY]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.CustomOutput, com.google.cloud.aiplatform.v1beta1.CustomOutput.Builder, com.google.cloud.aiplatform.v1beta1.CustomOutputOrBuilder> getCustomOutputFieldBuilder() { if (customOutputBuilder_ == null) { customOutputBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.aiplatform.v1beta1.CustomOutput, com.google.cloud.aiplatform.v1beta1.CustomOutput.Builder, com.google.cloud.aiplatform.v1beta1.CustomOutputOrBuilder>( getCustomOutput(), getParentForChildren(), isClean()); customOutput_ = null; } return customOutputBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1beta1.PairwiseMetricResult) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1beta1.PairwiseMetricResult) private static final com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new 
com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult(); } public static com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<PairwiseMetricResult> PARSER = new com.google.protobuf.AbstractParser<PairwiseMetricResult>() { @java.lang.Override public PairwiseMetricResult parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<PairwiseMetricResult> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<PairwiseMetricResult> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1beta1.PairwiseMetricResult getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,830
java-dialogflow/proto-google-cloud-dialogflow-v2/src/main/java/com/google/cloud/dialogflow/v2/ImportAgentRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2/agent.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.v2; /** * * * <pre> * The request message for * [Agents.ImportAgent][google.cloud.dialogflow.v2.Agents.ImportAgent]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.ImportAgentRequest} */ public final class ImportAgentRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2.ImportAgentRequest) ImportAgentRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ImportAgentRequest.newBuilder() to construct. 
private ImportAgentRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ImportAgentRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ImportAgentRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.AgentProto .internal_static_google_cloud_dialogflow_v2_ImportAgentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.AgentProto .internal_static_google_cloud_dialogflow_v2_ImportAgentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.ImportAgentRequest.class, com.google.cloud.dialogflow.v2.ImportAgentRequest.Builder.class); } private int agentCase_ = 0; @SuppressWarnings("serial") private java.lang.Object agent_; public enum AgentCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { AGENT_URI(2), AGENT_CONTENT(3), AGENT_NOT_SET(0); private final int value; private AgentCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. */ @java.lang.Deprecated public static AgentCase valueOf(int value) { return forNumber(value); } public static AgentCase forNumber(int value) { switch (value) { case 2: return AGENT_URI; case 3: return AGENT_CONTENT; case 0: return AGENT_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public AgentCase getAgentCase() { return AgentCase.forNumber(agentCase_); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. 
The project that the agent to import is associated with. * Format: `projects/&lt;Project ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The project that the agent to import is associated with. * Format: `projects/&lt;Project ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int AGENT_URI_FIELD_NUMBER = 2; /** * * * <pre> * The URI to a Google Cloud Storage file containing the agent to import. * Note: The URI must start with "gs://". * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string agent_uri = 2;</code> * * @return Whether the agentUri field is set. */ public boolean hasAgentUri() { return agentCase_ == 2; } /** * * * <pre> * The URI to a Google Cloud Storage file containing the agent to import. * Note: The URI must start with "gs://". 
* * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string agent_uri = 2;</code> * * @return The agentUri. */ public java.lang.String getAgentUri() { java.lang.Object ref = ""; if (agentCase_ == 2) { ref = agent_; } if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (agentCase_ == 2) { agent_ = s; } return s; } } /** * * * <pre> * The URI to a Google Cloud Storage file containing the agent to import. * Note: The URI must start with "gs://". * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string agent_uri = 2;</code> * * @return The bytes for agentUri. */ public com.google.protobuf.ByteString getAgentUriBytes() { java.lang.Object ref = ""; if (agentCase_ == 2) { ref = agent_; } if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (agentCase_ == 2) { agent_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int AGENT_CONTENT_FIELD_NUMBER = 3; /** * * * <pre> * Zip compressed raw byte content for agent. * </pre> * * <code>bytes agent_content = 3;</code> * * @return Whether the agentContent field is set. */ @java.lang.Override public boolean hasAgentContent() { return agentCase_ == 3; } /** * * * <pre> * Zip compressed raw byte content for agent. 
* </pre> * * <code>bytes agent_content = 3;</code> * * @return The agentContent. */ @java.lang.Override public com.google.protobuf.ByteString getAgentContent() { if (agentCase_ == 3) { return (com.google.protobuf.ByteString) agent_; } return com.google.protobuf.ByteString.EMPTY; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (agentCase_ == 2) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, agent_); } if (agentCase_ == 3) { output.writeBytes(3, (com.google.protobuf.ByteString) agent_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (agentCase_ == 2) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, agent_); } if (agentCase_ == 3) { size += com.google.protobuf.CodedOutputStream.computeBytesSize( 3, (com.google.protobuf.ByteString) agent_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2.ImportAgentRequest)) { return super.equals(obj); } com.google.cloud.dialogflow.v2.ImportAgentRequest other = (com.google.cloud.dialogflow.v2.ImportAgentRequest) obj; if (!getParent().equals(other.getParent())) return false; if 
(!getAgentCase().equals(other.getAgentCase())) return false; switch (agentCase_) { case 2: if (!getAgentUri().equals(other.getAgentUri())) return false; break; case 3: if (!getAgentContent().equals(other.getAgentContent())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); switch (agentCase_) { case 2: hash = (37 * hash) + AGENT_URI_FIELD_NUMBER; hash = (53 * hash) + getAgentUri().hashCode(); break; case 3: hash = (37 * hash) + AGENT_CONTENT_FIELD_NUMBER; hash = (53 * hash) + getAgentContent().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2.ImportAgentRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.ImportAgentRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.ImportAgentRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.ImportAgentRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.ImportAgentRequest parseFrom(byte[] 
data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2.ImportAgentRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2.ImportAgentRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.ImportAgentRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.ImportAgentRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.ImportAgentRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2.ImportAgentRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2.ImportAgentRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public 
Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dialogflow.v2.ImportAgentRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The request message for * [Agents.ImportAgent][google.cloud.dialogflow.v2.Agents.ImportAgent]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2.ImportAgentRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2.ImportAgentRequest) com.google.cloud.dialogflow.v2.ImportAgentRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2.AgentProto .internal_static_google_cloud_dialogflow_v2_ImportAgentRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2.AgentProto .internal_static_google_cloud_dialogflow_v2_ImportAgentRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2.ImportAgentRequest.class, com.google.cloud.dialogflow.v2.ImportAgentRequest.Builder.class); } // Construct using com.google.cloud.dialogflow.v2.ImportAgentRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; agentCase_ = 0; agent_ = null; return 
this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2.AgentProto .internal_static_google_cloud_dialogflow_v2_ImportAgentRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.v2.ImportAgentRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.v2.ImportAgentRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.v2.ImportAgentRequest build() { com.google.cloud.dialogflow.v2.ImportAgentRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.v2.ImportAgentRequest buildPartial() { com.google.cloud.dialogflow.v2.ImportAgentRequest result = new com.google.cloud.dialogflow.v2.ImportAgentRequest(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.dialogflow.v2.ImportAgentRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } } private void buildPartialOneofs(com.google.cloud.dialogflow.v2.ImportAgentRequest result) { result.agentCase_ = agentCase_; result.agent_ = this.agent_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { 
return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.v2.ImportAgentRequest) { return mergeFrom((com.google.cloud.dialogflow.v2.ImportAgentRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.v2.ImportAgentRequest other) { if (other == com.google.cloud.dialogflow.v2.ImportAgentRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } switch (other.getAgentCase()) { case AGENT_URI: { agentCase_ = 2; agent_ = other.agent_; onChanged(); break; } case AGENT_CONTENT: { setAgentContent(other.getAgentContent()); break; } case AGENT_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); agentCase_ = 2; agent_ = s; break; } // case 18 case 26: { agent_ = input.readBytes(); agentCase_ = 3; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) 
} catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int agentCase_ = 0; private java.lang.Object agent_; public AgentCase getAgentCase() { return AgentCase.forNumber(agentCase_); } public Builder clearAgent() { agentCase_ = 0; agent_ = null; onChanged(); return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The project that the agent to import is associated with. * Format: `projects/&lt;Project ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The project that the agent to import is associated with. * Format: `projects/&lt;Project ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The project that the agent to import is associated with. * Format: `projects/&lt;Project ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. 
*/ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The project that the agent to import is associated with. * Format: `projects/&lt;Project ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The project that the agent to import is associated with. * Format: `projects/&lt;Project ID&gt;`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The URI to a Google Cloud Storage file containing the agent to import. * Note: The URI must start with "gs://". * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string agent_uri = 2;</code> * * @return Whether the agentUri field is set. */ @java.lang.Override public boolean hasAgentUri() { return agentCase_ == 2; } /** * * * <pre> * The URI to a Google Cloud Storage file containing the agent to import. * Note: The URI must start with "gs://". 
* * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string agent_uri = 2;</code> * * @return The agentUri. */ @java.lang.Override public java.lang.String getAgentUri() { java.lang.Object ref = ""; if (agentCase_ == 2) { ref = agent_; } if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); if (agentCase_ == 2) { agent_ = s; } return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The URI to a Google Cloud Storage file containing the agent to import. * Note: The URI must start with "gs://". * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string agent_uri = 2;</code> * * @return The bytes for agentUri. */ @java.lang.Override public com.google.protobuf.ByteString getAgentUriBytes() { java.lang.Object ref = ""; if (agentCase_ == 2) { ref = agent_; } if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); if (agentCase_ == 2) { agent_ = b; } return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The URI to a Google Cloud Storage file containing the agent to import. * Note: The URI must start with "gs://". * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. 
For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string agent_uri = 2;</code> * * @param value The agentUri to set. * @return This builder for chaining. */ public Builder setAgentUri(java.lang.String value) { if (value == null) { throw new NullPointerException(); } agentCase_ = 2; agent_ = value; onChanged(); return this; } /** * * * <pre> * The URI to a Google Cloud Storage file containing the agent to import. * Note: The URI must start with "gs://". * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string agent_uri = 2;</code> * * @return This builder for chaining. */ public Builder clearAgentUri() { if (agentCase_ == 2) { agentCase_ = 0; agent_ = null; onChanged(); } return this; } /** * * * <pre> * The URI to a Google Cloud Storage file containing the agent to import. * Note: The URI must start with "gs://". * * Dialogflow performs a read operation for the Cloud Storage object * on the caller's behalf, so your request authentication must * have read permissions for the object. For more information, see * [Dialogflow access * control](https://cloud.google.com/dialogflow/cx/docs/concept/access-control#storage). * </pre> * * <code>string agent_uri = 2;</code> * * @param value The bytes for agentUri to set. * @return This builder for chaining. */ public Builder setAgentUriBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); agentCase_ = 2; agent_ = value; onChanged(); return this; } /** * * * <pre> * Zip compressed raw byte content for agent. 
* </pre> * * <code>bytes agent_content = 3;</code> * * @return Whether the agentContent field is set. */ public boolean hasAgentContent() { return agentCase_ == 3; } /** * * * <pre> * Zip compressed raw byte content for agent. * </pre> * * <code>bytes agent_content = 3;</code> * * @return The agentContent. */ public com.google.protobuf.ByteString getAgentContent() { if (agentCase_ == 3) { return (com.google.protobuf.ByteString) agent_; } return com.google.protobuf.ByteString.EMPTY; } /** * * * <pre> * Zip compressed raw byte content for agent. * </pre> * * <code>bytes agent_content = 3;</code> * * @param value The agentContent to set. * @return This builder for chaining. */ public Builder setAgentContent(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } agentCase_ = 3; agent_ = value; onChanged(); return this; } /** * * * <pre> * Zip compressed raw byte content for agent. * </pre> * * <code>bytes agent_content = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearAgentContent() { if (agentCase_ == 3) { agentCase_ = 0; agent_ = null; onChanged(); } return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2.ImportAgentRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2.ImportAgentRequest) private static final com.google.cloud.dialogflow.v2.ImportAgentRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2.ImportAgentRequest(); } public static com.google.cloud.dialogflow.v2.ImportAgentRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ImportAgentRequest> PARSER = new com.google.protobuf.AbstractParser<ImportAgentRequest>() { @java.lang.Override public ImportAgentRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ImportAgentRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ImportAgentRequest> 
getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.v2.ImportAgentRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,881
java-alloydb/proto-google-cloud-alloydb-v1beta/src/main/java/com/google/cloud/alloydb/v1beta/ListDatabasesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/alloydb/v1beta/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.alloydb.v1beta; /** * * * <pre> * Message for ListDatabases response. * </pre> * * Protobuf type {@code google.cloud.alloydb.v1beta.ListDatabasesResponse} */ public final class ListDatabasesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.alloydb.v1beta.ListDatabasesResponse) ListDatabasesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListDatabasesResponse.newBuilder() to construct. 
private ListDatabasesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListDatabasesResponse() { databases_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListDatabasesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_ListDatabasesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_ListDatabasesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1beta.ListDatabasesResponse.class, com.google.cloud.alloydb.v1beta.ListDatabasesResponse.Builder.class); } public static final int DATABASES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.alloydb.v1beta.Database> databases_; /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.alloydb.v1beta.Database> getDatabasesList() { return databases_; } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.alloydb.v1beta.DatabaseOrBuilder> getDatabasesOrBuilderList() { return databases_; } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ @java.lang.Override public int getDatabasesCount() { return databases_.size(); } /** * * * <pre> * The list of databases. 
* </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ @java.lang.Override public com.google.cloud.alloydb.v1beta.Database getDatabases(int index) { return databases_.get(index); } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ @java.lang.Override public com.google.cloud.alloydb.v1beta.DatabaseOrBuilder getDatabasesOrBuilder(int index) { return databases_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying the next page of results the server should return. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token identifying the next page of results the server should return. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < databases_.size(); i++) { output.writeMessage(1, databases_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < databases_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, databases_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.alloydb.v1beta.ListDatabasesResponse)) { return super.equals(obj); } com.google.cloud.alloydb.v1beta.ListDatabasesResponse other = (com.google.cloud.alloydb.v1beta.ListDatabasesResponse) obj; if (!getDatabasesList().equals(other.getDatabasesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return 
false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getDatabasesCount() > 0) { hash = (37 * hash) + DATABASES_FIELD_NUMBER; hash = (53 * hash) + getDatabasesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.alloydb.v1beta.ListDatabasesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public 
Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for ListDatabases response. * </pre> * * Protobuf type {@code google.cloud.alloydb.v1beta.ListDatabasesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.alloydb.v1beta.ListDatabasesResponse) com.google.cloud.alloydb.v1beta.ListDatabasesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_ListDatabasesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.alloydb.v1beta.ServiceProto .internal_static_google_cloud_alloydb_v1beta_ListDatabasesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.alloydb.v1beta.ListDatabasesResponse.class, com.google.cloud.alloydb.v1beta.ListDatabasesResponse.Builder.class); } // Construct using com.google.cloud.alloydb.v1beta.ListDatabasesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (databasesBuilder_ == null) { databases_ = java.util.Collections.emptyList(); } else { databases_ = null; databasesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.alloydb.v1beta.ServiceProto 
.internal_static_google_cloud_alloydb_v1beta_ListDatabasesResponse_descriptor; } @java.lang.Override public com.google.cloud.alloydb.v1beta.ListDatabasesResponse getDefaultInstanceForType() { return com.google.cloud.alloydb.v1beta.ListDatabasesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.alloydb.v1beta.ListDatabasesResponse build() { com.google.cloud.alloydb.v1beta.ListDatabasesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.alloydb.v1beta.ListDatabasesResponse buildPartial() { com.google.cloud.alloydb.v1beta.ListDatabasesResponse result = new com.google.cloud.alloydb.v1beta.ListDatabasesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.alloydb.v1beta.ListDatabasesResponse result) { if (databasesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { databases_ = java.util.Collections.unmodifiableList(databases_); bitField0_ = (bitField0_ & ~0x00000001); } result.databases_ = databases_; } else { result.databases_ = databasesBuilder_.build(); } } private void buildPartial0(com.google.cloud.alloydb.v1beta.ListDatabasesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } 
@java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.alloydb.v1beta.ListDatabasesResponse) { return mergeFrom((com.google.cloud.alloydb.v1beta.ListDatabasesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.alloydb.v1beta.ListDatabasesResponse other) { if (other == com.google.cloud.alloydb.v1beta.ListDatabasesResponse.getDefaultInstance()) return this; if (databasesBuilder_ == null) { if (!other.databases_.isEmpty()) { if (databases_.isEmpty()) { databases_ = other.databases_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureDatabasesIsMutable(); databases_.addAll(other.databases_); } onChanged(); } } else { if (!other.databases_.isEmpty()) { if (databasesBuilder_.isEmpty()) { databasesBuilder_.dispose(); databasesBuilder_ = null; databases_ = other.databases_; bitField0_ = (bitField0_ & ~0x00000001); databasesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getDatabasesFieldBuilder() : null; } else { databasesBuilder_.addAllMessages(other.databases_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.alloydb.v1beta.Database m = input.readMessage( com.google.cloud.alloydb.v1beta.Database.parser(), extensionRegistry); if (databasesBuilder_ == null) { ensureDatabasesIsMutable(); databases_.add(m); } else { databasesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.alloydb.v1beta.Database> databases_ = java.util.Collections.emptyList(); private void ensureDatabasesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { databases_ = new java.util.ArrayList<com.google.cloud.alloydb.v1beta.Database>(databases_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.alloydb.v1beta.Database, com.google.cloud.alloydb.v1beta.Database.Builder, com.google.cloud.alloydb.v1beta.DatabaseOrBuilder> 
databasesBuilder_; /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public java.util.List<com.google.cloud.alloydb.v1beta.Database> getDatabasesList() { if (databasesBuilder_ == null) { return java.util.Collections.unmodifiableList(databases_); } else { return databasesBuilder_.getMessageList(); } } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public int getDatabasesCount() { if (databasesBuilder_ == null) { return databases_.size(); } else { return databasesBuilder_.getCount(); } } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public com.google.cloud.alloydb.v1beta.Database getDatabases(int index) { if (databasesBuilder_ == null) { return databases_.get(index); } else { return databasesBuilder_.getMessage(index); } } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public Builder setDatabases(int index, com.google.cloud.alloydb.v1beta.Database value) { if (databasesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDatabasesIsMutable(); databases_.set(index, value); onChanged(); } else { databasesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public Builder setDatabases( int index, com.google.cloud.alloydb.v1beta.Database.Builder builderForValue) { if (databasesBuilder_ == null) { ensureDatabasesIsMutable(); databases_.set(index, builderForValue.build()); onChanged(); } else { databasesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of databases. 
* </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public Builder addDatabases(com.google.cloud.alloydb.v1beta.Database value) { if (databasesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDatabasesIsMutable(); databases_.add(value); onChanged(); } else { databasesBuilder_.addMessage(value); } return this; } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public Builder addDatabases(int index, com.google.cloud.alloydb.v1beta.Database value) { if (databasesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureDatabasesIsMutable(); databases_.add(index, value); onChanged(); } else { databasesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public Builder addDatabases(com.google.cloud.alloydb.v1beta.Database.Builder builderForValue) { if (databasesBuilder_ == null) { ensureDatabasesIsMutable(); databases_.add(builderForValue.build()); onChanged(); } else { databasesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public Builder addDatabases( int index, com.google.cloud.alloydb.v1beta.Database.Builder builderForValue) { if (databasesBuilder_ == null) { ensureDatabasesIsMutable(); databases_.add(index, builderForValue.build()); onChanged(); } else { databasesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public Builder addAllDatabases( java.lang.Iterable<? 
extends com.google.cloud.alloydb.v1beta.Database> values) { if (databasesBuilder_ == null) { ensureDatabasesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, databases_); onChanged(); } else { databasesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public Builder clearDatabases() { if (databasesBuilder_ == null) { databases_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { databasesBuilder_.clear(); } return this; } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public Builder removeDatabases(int index) { if (databasesBuilder_ == null) { ensureDatabasesIsMutable(); databases_.remove(index); onChanged(); } else { databasesBuilder_.remove(index); } return this; } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public com.google.cloud.alloydb.v1beta.Database.Builder getDatabasesBuilder(int index) { return getDatabasesFieldBuilder().getBuilder(index); } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public com.google.cloud.alloydb.v1beta.DatabaseOrBuilder getDatabasesOrBuilder(int index) { if (databasesBuilder_ == null) { return databases_.get(index); } else { return databasesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public java.util.List<? 
extends com.google.cloud.alloydb.v1beta.DatabaseOrBuilder> getDatabasesOrBuilderList() { if (databasesBuilder_ != null) { return databasesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(databases_); } } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public com.google.cloud.alloydb.v1beta.Database.Builder addDatabasesBuilder() { return getDatabasesFieldBuilder() .addBuilder(com.google.cloud.alloydb.v1beta.Database.getDefaultInstance()); } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public com.google.cloud.alloydb.v1beta.Database.Builder addDatabasesBuilder(int index) { return getDatabasesFieldBuilder() .addBuilder(index, com.google.cloud.alloydb.v1beta.Database.getDefaultInstance()); } /** * * * <pre> * The list of databases. * </pre> * * <code>repeated .google.cloud.alloydb.v1beta.Database databases = 1;</code> */ public java.util.List<com.google.cloud.alloydb.v1beta.Database.Builder> getDatabasesBuilderList() { return getDatabasesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.alloydb.v1beta.Database, com.google.cloud.alloydb.v1beta.Database.Builder, com.google.cloud.alloydb.v1beta.DatabaseOrBuilder> getDatabasesFieldBuilder() { if (databasesBuilder_ == null) { databasesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.alloydb.v1beta.Database, com.google.cloud.alloydb.v1beta.Database.Builder, com.google.cloud.alloydb.v1beta.DatabaseOrBuilder>( databases_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); databases_ = null; } return databasesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token identifying the next page of results the server should return. * If this field is omitted, there are no subsequent pages. 
* </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token identifying the next page of results the server should return. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token identifying the next page of results the server should return. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token identifying the next page of results the server should return. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token identifying the next page of results the server should return. 
* If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.alloydb.v1beta.ListDatabasesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.alloydb.v1beta.ListDatabasesResponse) private static final com.google.cloud.alloydb.v1beta.ListDatabasesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.alloydb.v1beta.ListDatabasesResponse(); } public static com.google.cloud.alloydb.v1beta.ListDatabasesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListDatabasesResponse> PARSER = new com.google.protobuf.AbstractParser<ListDatabasesResponse>() { @java.lang.Override public ListDatabasesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListDatabasesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListDatabasesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.alloydb.v1beta.ListDatabasesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,915
java-enterpriseknowledgegraph/google-cloud-enterpriseknowledgegraph/src/test/java/com/google/cloud/enterpriseknowledgegraph/v1/EnterpriseKnowledgeGraphServiceClientTest.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package com.google.cloud.enterpriseknowledgegraph.v1;

import static com.google.cloud.enterpriseknowledgegraph.v1.EnterpriseKnowledgeGraphServiceClient.ListEntityReconciliationJobsPagedResponse;

import com.google.api.gax.core.NoCredentialsProvider;
import com.google.api.gax.grpc.GaxGrpcProperties;
import com.google.api.gax.grpc.testing.LocalChannelProvider;
import com.google.api.gax.grpc.testing.MockGrpcService;
import com.google.api.gax.grpc.testing.MockServiceHelper;
import com.google.api.gax.rpc.ApiClientHeaderProvider;
import com.google.api.gax.rpc.InvalidArgumentException;
import com.google.common.collect.Lists;
import com.google.protobuf.AbstractMessage;
import com.google.protobuf.Empty;
import com.google.protobuf.ListValue;
import com.google.protobuf.Timestamp;
import com.google.protobuf.Value;
import com.google.rpc.Status;
import io.grpc.StatusRuntimeException;
import java.io.IOException;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.List;
import java.util.UUID;
import javax.annotation.Generated;
import org.junit.After;
import org.junit.AfterClass;
import org.junit.Assert;
import org.junit.Before;
import org.junit.BeforeClass;
import org.junit.Test;

/**
 * Unit tests for {@link EnterpriseKnowledgeGraphServiceClient} against an in-process gRPC mock.
 *
 * <p>Each RPC is exercised in four variants: resource-name overload, plain-string overload, and an
 * INVALID_ARGUMENT failure for each. Shared fixture construction and the per-call bookkeeping
 * (single-request capture, default API-client header check) live in private helpers to avoid the
 * copy-paste duplication of the raw generated form; test method names and observable assertions
 * are unchanged.
 *
 * <p>NOTE(review): originally emitted by gapic-generator-java; regenerating will overwrite the
 * helper refactoring below.
 */
@Generated("by gapic-generator-java")
public class EnterpriseKnowledgeGraphServiceClientTest {
  private static MockEnterpriseKnowledgeGraphService mockEnterpriseKnowledgeGraphService;
  private static MockServiceHelper mockServiceHelper;
  private LocalChannelProvider channelProvider;
  private EnterpriseKnowledgeGraphServiceClient client;

  @BeforeClass
  public static void startStaticServer() {
    mockEnterpriseKnowledgeGraphService = new MockEnterpriseKnowledgeGraphService();
    mockServiceHelper =
        new MockServiceHelper(
            UUID.randomUUID().toString(),
            Arrays.<MockGrpcService>asList(mockEnterpriseKnowledgeGraphService));
    mockServiceHelper.start();
  }

  @AfterClass
  public static void stopServer() {
    mockServiceHelper.stop();
  }

  @Before
  public void setUp() throws IOException {
    mockServiceHelper.reset();
    channelProvider = mockServiceHelper.createChannelProvider();
    EnterpriseKnowledgeGraphServiceSettings settings =
        EnterpriseKnowledgeGraphServiceSettings.newBuilder()
            .setTransportChannelProvider(channelProvider)
            .setCredentialsProvider(NoCredentialsProvider.create())
            .build();
    client = EnterpriseKnowledgeGraphServiceClient.create(settings);
  }

  @After
  public void tearDown() throws Exception {
    client.close();
  }

  // ---------------------------------------------------------------------------------------------
  // Shared fixtures and assertion helpers.
  // ---------------------------------------------------------------------------------------------

  /** Fully-populated job fixture used as the expected response in create/get tests. */
  private static EntityReconciliationJob sampleEntityReconciliationJob() {
    return EntityReconciliationJob.newBuilder()
        .setName(
            EntityReconciliationJobName.of(
                    "[PROJECT]", "[LOCATION]", "[ENTITY_RECONCILIATION_JOB]")
                .toString())
        .setInputConfig(InputConfig.newBuilder().build())
        .setOutputConfig(OutputConfig.newBuilder().build())
        .setState(JobState.forNumber(0))
        .setError(Status.newBuilder().build())
        .setCreateTime(Timestamp.newBuilder().build())
        .setEndTime(Timestamp.newBuilder().build())
        .setUpdateTime(Timestamp.newBuilder().build())
        .setReconConfig(ReconConfig.newBuilder().build())
        .build();
  }

  /** Convenience: a protobuf {@code Value} carrying boolean {@code true}. */
  private static Value trueValue() {
    return Value.newBuilder().setBoolValue(true).build();
  }

  private static LookupResponse sampleLookupResponse() {
    return LookupResponse.newBuilder()
        .setContext(trueValue())
        .setType(trueValue())
        .setItemListElement(ListValue.newBuilder().build())
        .build();
  }

  private static SearchResponse sampleSearchResponse() {
    return SearchResponse.newBuilder()
        .setContext(trueValue())
        .setType(trueValue())
        .setItemListElement(ListValue.newBuilder().build())
        .build();
  }

  private static LookupPublicKgResponse sampleLookupPublicKgResponse() {
    return LookupPublicKgResponse.newBuilder()
        .setContext(trueValue())
        .setType(trueValue())
        .setItemListElement(ListValue.newBuilder().build())
        .build();
  }

  private static SearchPublicKgResponse sampleSearchPublicKgResponse() {
    return SearchPublicKgResponse.newBuilder()
        .setContext(trueValue())
        .setType(trueValue())
        .setItemListElement(ListValue.newBuilder().build())
        .build();
  }

  private static EntityReconciliationJobName sampleJobName() {
    return EntityReconciliationJobName.of("[PROJECT]", "[LOCATION]", "[ENTITY_RECONCILIATION_JOB]");
  }

  private static LocationName sampleLocation() {
    return LocationName.of("[PROJECT]", "[LOCATION]");
  }

  /** Queues an INVALID_ARGUMENT failure on the mock for the next RPC. */
  private void addInvalidArgumentException() {
    mockEnterpriseKnowledgeGraphService.addException(
        new StatusRuntimeException(io.grpc.Status.INVALID_ARGUMENT));
  }

  /**
   * Asserts the mock received exactly one request and returns it cast to the expected type.
   *
   * @throws ClassCastException if the captured request is of a different type (test failure)
   */
  private <T extends AbstractMessage> T getOnlyRequest(Class<T> requestType) {
    List<AbstractMessage> actualRequests = mockEnterpriseKnowledgeGraphService.getRequests();
    Assert.assertEquals(1, actualRequests.size());
    return requestType.cast(actualRequests.get(0));
  }

  /** Asserts the default GAPIC API-client header was sent on the channel. */
  private void assertDefaultHeaderSent() {
    Assert.assertTrue(
        channelProvider.isHeaderSent(
            ApiClientHeaderProvider.getDefaultApiClientHeaderKey(),
            GaxGrpcProperties.getDefaultApiClientHeaderPattern()));
  }

  // ---------------------------------------------------------------------------------------------
  // createEntityReconciliationJob
  // ---------------------------------------------------------------------------------------------

  @Test
  public void createEntityReconciliationJobTest() throws Exception {
    EntityReconciliationJob expectedResponse = sampleEntityReconciliationJob();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    LocationName parent = sampleLocation();
    EntityReconciliationJob entityReconciliationJob = EntityReconciliationJob.newBuilder().build();

    EntityReconciliationJob actualResponse =
        client.createEntityReconciliationJob(parent, entityReconciliationJob);
    Assert.assertEquals(expectedResponse, actualResponse);

    CreateEntityReconciliationJobRequest actualRequest =
        getOnlyRequest(CreateEntityReconciliationJobRequest.class);
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(entityReconciliationJob, actualRequest.getEntityReconciliationJob());
    assertDefaultHeaderSent();
  }

  @Test
  public void createEntityReconciliationJobExceptionTest() throws Exception {
    addInvalidArgumentException();

    try {
      LocationName parent = sampleLocation();
      EntityReconciliationJob entityReconciliationJob =
          EntityReconciliationJob.newBuilder().build();
      client.createEntityReconciliationJob(parent, entityReconciliationJob);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void createEntityReconciliationJobTest2() throws Exception {
    EntityReconciliationJob expectedResponse = sampleEntityReconciliationJob();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    String parent = "parent-995424086";
    EntityReconciliationJob entityReconciliationJob = EntityReconciliationJob.newBuilder().build();

    EntityReconciliationJob actualResponse =
        client.createEntityReconciliationJob(parent, entityReconciliationJob);
    Assert.assertEquals(expectedResponse, actualResponse);

    CreateEntityReconciliationJobRequest actualRequest =
        getOnlyRequest(CreateEntityReconciliationJobRequest.class);
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(entityReconciliationJob, actualRequest.getEntityReconciliationJob());
    assertDefaultHeaderSent();
  }

  @Test
  public void createEntityReconciliationJobExceptionTest2() throws Exception {
    addInvalidArgumentException();

    try {
      String parent = "parent-995424086";
      EntityReconciliationJob entityReconciliationJob =
          EntityReconciliationJob.newBuilder().build();
      client.createEntityReconciliationJob(parent, entityReconciliationJob);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------------------------
  // getEntityReconciliationJob
  // ---------------------------------------------------------------------------------------------

  @Test
  public void getEntityReconciliationJobTest() throws Exception {
    EntityReconciliationJob expectedResponse = sampleEntityReconciliationJob();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    EntityReconciliationJobName name = sampleJobName();

    EntityReconciliationJob actualResponse = client.getEntityReconciliationJob(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    GetEntityReconciliationJobRequest actualRequest =
        getOnlyRequest(GetEntityReconciliationJobRequest.class);
    Assert.assertEquals(name.toString(), actualRequest.getName());
    assertDefaultHeaderSent();
  }

  @Test
  public void getEntityReconciliationJobExceptionTest() throws Exception {
    addInvalidArgumentException();

    try {
      EntityReconciliationJobName name = sampleJobName();
      client.getEntityReconciliationJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void getEntityReconciliationJobTest2() throws Exception {
    EntityReconciliationJob expectedResponse = sampleEntityReconciliationJob();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    String name = "name3373707";

    EntityReconciliationJob actualResponse = client.getEntityReconciliationJob(name);
    Assert.assertEquals(expectedResponse, actualResponse);

    GetEntityReconciliationJobRequest actualRequest =
        getOnlyRequest(GetEntityReconciliationJobRequest.class);
    Assert.assertEquals(name, actualRequest.getName());
    assertDefaultHeaderSent();
  }

  @Test
  public void getEntityReconciliationJobExceptionTest2() throws Exception {
    addInvalidArgumentException();

    try {
      String name = "name3373707";
      client.getEntityReconciliationJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------------------------
  // listEntityReconciliationJobs
  // ---------------------------------------------------------------------------------------------

  @Test
  public void listEntityReconciliationJobsTest() throws Exception {
    EntityReconciliationJob responsesElement = EntityReconciliationJob.newBuilder().build();
    ListEntityReconciliationJobsResponse expectedResponse =
        ListEntityReconciliationJobsResponse.newBuilder()
            .setNextPageToken("")
            .addAllEntityReconciliationJobs(Arrays.asList(responsesElement))
            .build();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    LocationName parent = sampleLocation();

    ListEntityReconciliationJobsPagedResponse pagedListResponse =
        client.listEntityReconciliationJobs(parent);

    List<EntityReconciliationJob> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(
        expectedResponse.getEntityReconciliationJobsList().get(0), resources.get(0));

    ListEntityReconciliationJobsRequest actualRequest =
        getOnlyRequest(ListEntityReconciliationJobsRequest.class);
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    assertDefaultHeaderSent();
  }

  @Test
  public void listEntityReconciliationJobsExceptionTest() throws Exception {
    addInvalidArgumentException();

    try {
      LocationName parent = sampleLocation();
      client.listEntityReconciliationJobs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void listEntityReconciliationJobsTest2() throws Exception {
    EntityReconciliationJob responsesElement = EntityReconciliationJob.newBuilder().build();
    ListEntityReconciliationJobsResponse expectedResponse =
        ListEntityReconciliationJobsResponse.newBuilder()
            .setNextPageToken("")
            .addAllEntityReconciliationJobs(Arrays.asList(responsesElement))
            .build();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    String parent = "parent-995424086";

    ListEntityReconciliationJobsPagedResponse pagedListResponse =
        client.listEntityReconciliationJobs(parent);

    List<EntityReconciliationJob> resources = Lists.newArrayList(pagedListResponse.iterateAll());

    Assert.assertEquals(1, resources.size());
    Assert.assertEquals(
        expectedResponse.getEntityReconciliationJobsList().get(0), resources.get(0));

    ListEntityReconciliationJobsRequest actualRequest =
        getOnlyRequest(ListEntityReconciliationJobsRequest.class);
    Assert.assertEquals(parent, actualRequest.getParent());
    assertDefaultHeaderSent();
  }

  @Test
  public void listEntityReconciliationJobsExceptionTest2() throws Exception {
    addInvalidArgumentException();

    try {
      String parent = "parent-995424086";
      client.listEntityReconciliationJobs(parent);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------------------------
  // cancelEntityReconciliationJob
  // ---------------------------------------------------------------------------------------------

  @Test
  public void cancelEntityReconciliationJobTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    EntityReconciliationJobName name = sampleJobName();

    client.cancelEntityReconciliationJob(name);

    CancelEntityReconciliationJobRequest actualRequest =
        getOnlyRequest(CancelEntityReconciliationJobRequest.class);
    Assert.assertEquals(name.toString(), actualRequest.getName());
    assertDefaultHeaderSent();
  }

  @Test
  public void cancelEntityReconciliationJobExceptionTest() throws Exception {
    addInvalidArgumentException();

    try {
      EntityReconciliationJobName name = sampleJobName();
      client.cancelEntityReconciliationJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void cancelEntityReconciliationJobTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    String name = "name3373707";

    client.cancelEntityReconciliationJob(name);

    CancelEntityReconciliationJobRequest actualRequest =
        getOnlyRequest(CancelEntityReconciliationJobRequest.class);
    Assert.assertEquals(name, actualRequest.getName());
    assertDefaultHeaderSent();
  }

  @Test
  public void cancelEntityReconciliationJobExceptionTest2() throws Exception {
    addInvalidArgumentException();

    try {
      String name = "name3373707";
      client.cancelEntityReconciliationJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------------------------
  // deleteEntityReconciliationJob
  // ---------------------------------------------------------------------------------------------

  @Test
  public void deleteEntityReconciliationJobTest() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    EntityReconciliationJobName name = sampleJobName();

    client.deleteEntityReconciliationJob(name);

    DeleteEntityReconciliationJobRequest actualRequest =
        getOnlyRequest(DeleteEntityReconciliationJobRequest.class);
    Assert.assertEquals(name.toString(), actualRequest.getName());
    assertDefaultHeaderSent();
  }

  @Test
  public void deleteEntityReconciliationJobExceptionTest() throws Exception {
    addInvalidArgumentException();

    try {
      EntityReconciliationJobName name = sampleJobName();
      client.deleteEntityReconciliationJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void deleteEntityReconciliationJobTest2() throws Exception {
    Empty expectedResponse = Empty.newBuilder().build();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    String name = "name3373707";

    client.deleteEntityReconciliationJob(name);

    DeleteEntityReconciliationJobRequest actualRequest =
        getOnlyRequest(DeleteEntityReconciliationJobRequest.class);
    Assert.assertEquals(name, actualRequest.getName());
    assertDefaultHeaderSent();
  }

  @Test
  public void deleteEntityReconciliationJobExceptionTest2() throws Exception {
    addInvalidArgumentException();

    try {
      String name = "name3373707";
      client.deleteEntityReconciliationJob(name);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------------------------
  // lookup
  // ---------------------------------------------------------------------------------------------

  @Test
  public void lookupTest() throws Exception {
    LookupResponse expectedResponse = sampleLookupResponse();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    LocationName parent = sampleLocation();
    List<String> ids = new ArrayList<>();

    LookupResponse actualResponse = client.lookup(parent, ids);
    Assert.assertEquals(expectedResponse, actualResponse);

    LookupRequest actualRequest = getOnlyRequest(LookupRequest.class);
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(ids, actualRequest.getIdsList());
    assertDefaultHeaderSent();
  }

  @Test
  public void lookupExceptionTest() throws Exception {
    addInvalidArgumentException();

    try {
      LocationName parent = sampleLocation();
      List<String> ids = new ArrayList<>();
      client.lookup(parent, ids);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void lookupTest2() throws Exception {
    LookupResponse expectedResponse = sampleLookupResponse();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    String parent = "parent-995424086";
    List<String> ids = new ArrayList<>();

    LookupResponse actualResponse = client.lookup(parent, ids);
    Assert.assertEquals(expectedResponse, actualResponse);

    LookupRequest actualRequest = getOnlyRequest(LookupRequest.class);
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(ids, actualRequest.getIdsList());
    assertDefaultHeaderSent();
  }

  @Test
  public void lookupExceptionTest2() throws Exception {
    addInvalidArgumentException();

    try {
      String parent = "parent-995424086";
      List<String> ids = new ArrayList<>();
      client.lookup(parent, ids);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------------------------
  // search
  // ---------------------------------------------------------------------------------------------

  @Test
  public void searchTest() throws Exception {
    SearchResponse expectedResponse = sampleSearchResponse();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    LocationName parent = sampleLocation();
    String query = "query107944136";

    SearchResponse actualResponse = client.search(parent, query);
    Assert.assertEquals(expectedResponse, actualResponse);

    SearchRequest actualRequest = getOnlyRequest(SearchRequest.class);
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(query, actualRequest.getQuery());
    assertDefaultHeaderSent();
  }

  @Test
  public void searchExceptionTest() throws Exception {
    addInvalidArgumentException();

    try {
      LocationName parent = sampleLocation();
      String query = "query107944136";
      client.search(parent, query);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void searchTest2() throws Exception {
    SearchResponse expectedResponse = sampleSearchResponse();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    String parent = "parent-995424086";
    String query = "query107944136";

    SearchResponse actualResponse = client.search(parent, query);
    Assert.assertEquals(expectedResponse, actualResponse);

    SearchRequest actualRequest = getOnlyRequest(SearchRequest.class);
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(query, actualRequest.getQuery());
    assertDefaultHeaderSent();
  }

  @Test
  public void searchExceptionTest2() throws Exception {
    addInvalidArgumentException();

    try {
      String parent = "parent-995424086";
      String query = "query107944136";
      client.search(parent, query);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------------------------
  // lookupPublicKg
  // ---------------------------------------------------------------------------------------------

  @Test
  public void lookupPublicKgTest() throws Exception {
    LookupPublicKgResponse expectedResponse = sampleLookupPublicKgResponse();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    LocationName parent = sampleLocation();
    List<String> ids = new ArrayList<>();

    LookupPublicKgResponse actualResponse = client.lookupPublicKg(parent, ids);
    Assert.assertEquals(expectedResponse, actualResponse);

    LookupPublicKgRequest actualRequest = getOnlyRequest(LookupPublicKgRequest.class);
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(ids, actualRequest.getIdsList());
    assertDefaultHeaderSent();
  }

  @Test
  public void lookupPublicKgExceptionTest() throws Exception {
    addInvalidArgumentException();

    try {
      LocationName parent = sampleLocation();
      List<String> ids = new ArrayList<>();
      client.lookupPublicKg(parent, ids);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void lookupPublicKgTest2() throws Exception {
    LookupPublicKgResponse expectedResponse = sampleLookupPublicKgResponse();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    String parent = "parent-995424086";
    List<String> ids = new ArrayList<>();

    LookupPublicKgResponse actualResponse = client.lookupPublicKg(parent, ids);
    Assert.assertEquals(expectedResponse, actualResponse);

    LookupPublicKgRequest actualRequest = getOnlyRequest(LookupPublicKgRequest.class);
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(ids, actualRequest.getIdsList());
    assertDefaultHeaderSent();
  }

  @Test
  public void lookupPublicKgExceptionTest2() throws Exception {
    addInvalidArgumentException();

    try {
      String parent = "parent-995424086";
      List<String> ids = new ArrayList<>();
      client.lookupPublicKg(parent, ids);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  // ---------------------------------------------------------------------------------------------
  // searchPublicKg
  // ---------------------------------------------------------------------------------------------

  @Test
  public void searchPublicKgTest() throws Exception {
    SearchPublicKgResponse expectedResponse = sampleSearchPublicKgResponse();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    LocationName parent = sampleLocation();
    String query = "query107944136";

    SearchPublicKgResponse actualResponse = client.searchPublicKg(parent, query);
    Assert.assertEquals(expectedResponse, actualResponse);

    SearchPublicKgRequest actualRequest = getOnlyRequest(SearchPublicKgRequest.class);
    Assert.assertEquals(parent.toString(), actualRequest.getParent());
    Assert.assertEquals(query, actualRequest.getQuery());
    assertDefaultHeaderSent();
  }

  @Test
  public void searchPublicKgExceptionTest() throws Exception {
    addInvalidArgumentException();

    try {
      LocationName parent = sampleLocation();
      String query = "query107944136";
      client.searchPublicKg(parent, query);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }

  @Test
  public void searchPublicKgTest2() throws Exception {
    SearchPublicKgResponse expectedResponse = sampleSearchPublicKgResponse();
    mockEnterpriseKnowledgeGraphService.addResponse(expectedResponse);

    String parent = "parent-995424086";
    String query = "query107944136";

    SearchPublicKgResponse actualResponse = client.searchPublicKg(parent, query);
    Assert.assertEquals(expectedResponse, actualResponse);

    SearchPublicKgRequest actualRequest = getOnlyRequest(SearchPublicKgRequest.class);
    Assert.assertEquals(parent, actualRequest.getParent());
    Assert.assertEquals(query, actualRequest.getQuery());
    assertDefaultHeaderSent();
  }

  @Test
  public void searchPublicKgExceptionTest2() throws Exception {
    addInvalidArgumentException();

    try {
      String parent = "parent-995424086";
      String query = "query107944136";
      client.searchPublicKg(parent, query);
      Assert.fail("No exception raised");
    } catch (InvalidArgumentException e) {
      // Expected exception.
    }
  }
}
googleapis/google-cloud-java
36,163
java-valkey/google-cloud-valkey/src/main/java/com/google/cloud/memorystore/v1beta/stub/HttpJsonMemorystoreStub.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.memorystore.v1beta.stub; import static com.google.cloud.memorystore.v1beta.MemorystoreClient.ListInstancesPagedResponse; import static com.google.cloud.memorystore.v1beta.MemorystoreClient.ListLocationsPagedResponse; import com.google.api.HttpRule; import com.google.api.core.BetaApi; import com.google.api.core.InternalApi; import com.google.api.gax.core.BackgroundResource; import com.google.api.gax.core.BackgroundResourceAggregation; import com.google.api.gax.httpjson.ApiMethodDescriptor; import com.google.api.gax.httpjson.HttpJsonCallSettings; import com.google.api.gax.httpjson.HttpJsonOperationSnapshot; import com.google.api.gax.httpjson.HttpJsonStubCallableFactory; import com.google.api.gax.httpjson.ProtoMessageRequestFormatter; import com.google.api.gax.httpjson.ProtoMessageResponseParser; import com.google.api.gax.httpjson.ProtoRestSerializer; import com.google.api.gax.httpjson.longrunning.stub.HttpJsonOperationsStub; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallable; import com.google.api.gax.rpc.RequestParamsBuilder; import com.google.api.gax.rpc.UnaryCallable; import com.google.cloud.location.GetLocationRequest; import com.google.cloud.location.ListLocationsRequest; import com.google.cloud.location.ListLocationsResponse; import com.google.cloud.location.Location; import 
com.google.cloud.memorystore.v1beta.CertificateAuthority; import com.google.cloud.memorystore.v1beta.CreateInstanceRequest; import com.google.cloud.memorystore.v1beta.DeleteInstanceRequest; import com.google.cloud.memorystore.v1beta.GetCertificateAuthorityRequest; import com.google.cloud.memorystore.v1beta.GetInstanceRequest; import com.google.cloud.memorystore.v1beta.Instance; import com.google.cloud.memorystore.v1beta.ListInstancesRequest; import com.google.cloud.memorystore.v1beta.ListInstancesResponse; import com.google.cloud.memorystore.v1beta.OperationMetadata; import com.google.cloud.memorystore.v1beta.UpdateInstanceRequest; import com.google.common.collect.ImmutableMap; import com.google.longrunning.Operation; import com.google.protobuf.Empty; import com.google.protobuf.TypeRegistry; import java.io.IOException; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.Map; import java.util.concurrent.TimeUnit; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * REST stub implementation for the Memorystore service API. * * <p>This class is for advanced usage and reflects the underlying API directly. 
*/ @BetaApi @Generated("by gapic-generator-java") public class HttpJsonMemorystoreStub extends MemorystoreStub { private static final TypeRegistry typeRegistry = TypeRegistry.newBuilder() .add(Empty.getDescriptor()) .add(Instance.getDescriptor()) .add(OperationMetadata.getDescriptor()) .build(); private static final ApiMethodDescriptor<ListInstancesRequest, ListInstancesResponse> listInstancesMethodDescriptor = ApiMethodDescriptor.<ListInstancesRequest, ListInstancesResponse>newBuilder() .setFullMethodName("google.cloud.memorystore.v1beta.Memorystore/ListInstances") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<ListInstancesRequest>newBuilder() .setPath( "/v1beta/{parent=projects/*/locations/*}/instances", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<ListInstancesRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "parent", request.getParent()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<ListInstancesRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "filter", request.getFilter()); serializer.putQueryParam(fields, "orderBy", request.getOrderBy()); serializer.putQueryParam(fields, "pageSize", request.getPageSize()); serializer.putQueryParam(fields, "pageToken", request.getPageToken()); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<ListInstancesResponse>newBuilder() .setDefaultInstance(ListInstancesResponse.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<GetInstanceRequest, Instance> getInstanceMethodDescriptor = ApiMethodDescriptor.<GetInstanceRequest, Instance>newBuilder() 
.setFullMethodName("google.cloud.memorystore.v1beta.Memorystore/GetInstance") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<GetInstanceRequest>newBuilder() .setPath( "/v1beta/{name=projects/*/locations/*/instances/*}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<GetInstanceRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "name", request.getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<GetInstanceRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<Instance>newBuilder() .setDefaultInstance(Instance.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<CreateInstanceRequest, Operation> createInstanceMethodDescriptor = ApiMethodDescriptor.<CreateInstanceRequest, Operation>newBuilder() .setFullMethodName("google.cloud.memorystore.v1beta.Memorystore/CreateInstance") .setHttpMethod("POST") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<CreateInstanceRequest>newBuilder() .setPath( "/v1beta/{parent=projects/*/locations/*}/instances", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<CreateInstanceRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "parent", request.getParent()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<CreateInstanceRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "instanceId", request.getInstanceId()); serializer.putQueryParam(fields, 
"requestId", request.getRequestId()); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody("instance", request.getInstance(), true)) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (CreateInstanceRequest request, Operation response) -> HttpJsonOperationSnapshot.create(response)) .build(); private static final ApiMethodDescriptor<UpdateInstanceRequest, Operation> updateInstanceMethodDescriptor = ApiMethodDescriptor.<UpdateInstanceRequest, Operation>newBuilder() .setFullMethodName("google.cloud.memorystore.v1beta.Memorystore/UpdateInstance") .setHttpMethod("PATCH") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<UpdateInstanceRequest>newBuilder() .setPath( "/v1beta/{instance.name=projects/*/locations/*/instances/*}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<UpdateInstanceRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam( fields, "instance.name", request.getInstance().getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<UpdateInstanceRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "requestId", request.getRequestId()); serializer.putQueryParam(fields, "updateMask", request.getUpdateMask()); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor( request -> ProtoRestSerializer.create() .toBody("instance", request.getInstance(), true)) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) 
.setOperationSnapshotFactory( (UpdateInstanceRequest request, Operation response) -> HttpJsonOperationSnapshot.create(response)) .build(); private static final ApiMethodDescriptor<DeleteInstanceRequest, Operation> deleteInstanceMethodDescriptor = ApiMethodDescriptor.<DeleteInstanceRequest, Operation>newBuilder() .setFullMethodName("google.cloud.memorystore.v1beta.Memorystore/DeleteInstance") .setHttpMethod("DELETE") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<DeleteInstanceRequest>newBuilder() .setPath( "/v1beta/{name=projects/*/locations/*/instances/*}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<DeleteInstanceRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "name", request.getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<DeleteInstanceRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "requestId", request.getRequestId()); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<Operation>newBuilder() .setDefaultInstance(Operation.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .setOperationSnapshotFactory( (DeleteInstanceRequest request, Operation response) -> HttpJsonOperationSnapshot.create(response)) .build(); private static final ApiMethodDescriptor<GetCertificateAuthorityRequest, CertificateAuthority> getCertificateAuthorityMethodDescriptor = ApiMethodDescriptor.<GetCertificateAuthorityRequest, CertificateAuthority>newBuilder() .setFullMethodName( "google.cloud.memorystore.v1beta.Memorystore/GetCertificateAuthority") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( 
ProtoMessageRequestFormatter.<GetCertificateAuthorityRequest>newBuilder() .setPath( "/v1beta/{name=projects/*/locations/*/instances/*}/certificateAuthority", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<GetCertificateAuthorityRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "name", request.getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<GetCertificateAuthorityRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<CertificateAuthority>newBuilder() .setDefaultInstance(CertificateAuthority.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<ListLocationsRequest, ListLocationsResponse> listLocationsMethodDescriptor = ApiMethodDescriptor.<ListLocationsRequest, ListLocationsResponse>newBuilder() .setFullMethodName("google.cloud.location.Locations/ListLocations") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<ListLocationsRequest>newBuilder() .setPath( "/v1beta/{name=projects/*}/locations", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<ListLocationsRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "name", request.getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<ListLocationsRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( 
ProtoMessageResponseParser.<ListLocationsResponse>newBuilder() .setDefaultInstance(ListLocationsResponse.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private static final ApiMethodDescriptor<GetLocationRequest, Location> getLocationMethodDescriptor = ApiMethodDescriptor.<GetLocationRequest, Location>newBuilder() .setFullMethodName("google.cloud.location.Locations/GetLocation") .setHttpMethod("GET") .setType(ApiMethodDescriptor.MethodType.UNARY) .setRequestFormatter( ProtoMessageRequestFormatter.<GetLocationRequest>newBuilder() .setPath( "/v1beta/{name=projects/*/locations/*}", request -> { Map<String, String> fields = new HashMap<>(); ProtoRestSerializer<GetLocationRequest> serializer = ProtoRestSerializer.create(); serializer.putPathParam(fields, "name", request.getName()); return fields; }) .setQueryParamsExtractor( request -> { Map<String, List<String>> fields = new HashMap<>(); ProtoRestSerializer<GetLocationRequest> serializer = ProtoRestSerializer.create(); serializer.putQueryParam(fields, "$alt", "json;enum-encoding=int"); return fields; }) .setRequestBodyExtractor(request -> null) .build()) .setResponseParser( ProtoMessageResponseParser.<Location>newBuilder() .setDefaultInstance(Location.getDefaultInstance()) .setDefaultTypeRegistry(typeRegistry) .build()) .build(); private final UnaryCallable<ListInstancesRequest, ListInstancesResponse> listInstancesCallable; private final UnaryCallable<ListInstancesRequest, ListInstancesPagedResponse> listInstancesPagedCallable; private final UnaryCallable<GetInstanceRequest, Instance> getInstanceCallable; private final UnaryCallable<CreateInstanceRequest, Operation> createInstanceCallable; private final OperationCallable<CreateInstanceRequest, Instance, OperationMetadata> createInstanceOperationCallable; private final UnaryCallable<UpdateInstanceRequest, Operation> updateInstanceCallable; private final OperationCallable<UpdateInstanceRequest, Instance, OperationMetadata> 
updateInstanceOperationCallable; private final UnaryCallable<DeleteInstanceRequest, Operation> deleteInstanceCallable; private final OperationCallable<DeleteInstanceRequest, Empty, OperationMetadata> deleteInstanceOperationCallable; private final UnaryCallable<GetCertificateAuthorityRequest, CertificateAuthority> getCertificateAuthorityCallable; private final UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable; private final UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse> listLocationsPagedCallable; private final UnaryCallable<GetLocationRequest, Location> getLocationCallable; private final BackgroundResource backgroundResources; private final HttpJsonOperationsStub httpJsonOperationsStub; private final HttpJsonStubCallableFactory callableFactory; public static final HttpJsonMemorystoreStub create(MemorystoreStubSettings settings) throws IOException { return new HttpJsonMemorystoreStub(settings, ClientContext.create(settings)); } public static final HttpJsonMemorystoreStub create(ClientContext clientContext) throws IOException { return new HttpJsonMemorystoreStub(MemorystoreStubSettings.newBuilder().build(), clientContext); } public static final HttpJsonMemorystoreStub create( ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { return new HttpJsonMemorystoreStub( MemorystoreStubSettings.newBuilder().build(), clientContext, callableFactory); } /** * Constructs an instance of HttpJsonMemorystoreStub, using the given settings. This is protected * so that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. */ protected HttpJsonMemorystoreStub(MemorystoreStubSettings settings, ClientContext clientContext) throws IOException { this(settings, clientContext, new HttpJsonMemorystoreCallableFactory()); } /** * Constructs an instance of HttpJsonMemorystoreStub, using the given settings. 
This is protected * so that it is easy to make a subclass, but otherwise, the static factory methods should be * preferred. */ protected HttpJsonMemorystoreStub( MemorystoreStubSettings settings, ClientContext clientContext, HttpJsonStubCallableFactory callableFactory) throws IOException { this.callableFactory = callableFactory; this.httpJsonOperationsStub = HttpJsonOperationsStub.create( clientContext, callableFactory, typeRegistry, ImmutableMap.<String, HttpRule>builder() .put( "google.longrunning.Operations.CancelOperation", HttpRule.newBuilder() .setPost("/v1beta/{name=projects/*/locations/*/operations/*}:cancel") .build()) .put( "google.longrunning.Operations.DeleteOperation", HttpRule.newBuilder() .setDelete("/v1beta/{name=projects/*/locations/*/operations/*}") .build()) .put( "google.longrunning.Operations.GetOperation", HttpRule.newBuilder() .setGet("/v1beta/{name=projects/*/locations/*/operations/*}") .build()) .put( "google.longrunning.Operations.ListOperations", HttpRule.newBuilder() .setGet("/v1beta/{name=projects/*/locations/*}/operations") .build()) .build()); HttpJsonCallSettings<ListInstancesRequest, ListInstancesResponse> listInstancesTransportSettings = HttpJsonCallSettings.<ListInstancesRequest, ListInstancesResponse>newBuilder() .setMethodDescriptor(listInstancesMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); HttpJsonCallSettings<GetInstanceRequest, Instance> getInstanceTransportSettings = HttpJsonCallSettings.<GetInstanceRequest, Instance>newBuilder() .setMethodDescriptor(getInstanceMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); 
HttpJsonCallSettings<CreateInstanceRequest, Operation> createInstanceTransportSettings = HttpJsonCallSettings.<CreateInstanceRequest, Operation>newBuilder() .setMethodDescriptor(createInstanceMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("parent", String.valueOf(request.getParent())); return builder.build(); }) .build(); HttpJsonCallSettings<UpdateInstanceRequest, Operation> updateInstanceTransportSettings = HttpJsonCallSettings.<UpdateInstanceRequest, Operation>newBuilder() .setMethodDescriptor(updateInstanceMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("instance.name", String.valueOf(request.getInstance().getName())); return builder.build(); }) .build(); HttpJsonCallSettings<DeleteInstanceRequest, Operation> deleteInstanceTransportSettings = HttpJsonCallSettings.<DeleteInstanceRequest, Operation>newBuilder() .setMethodDescriptor(deleteInstanceMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); HttpJsonCallSettings<GetCertificateAuthorityRequest, CertificateAuthority> getCertificateAuthorityTransportSettings = HttpJsonCallSettings.<GetCertificateAuthorityRequest, CertificateAuthority>newBuilder() .setMethodDescriptor(getCertificateAuthorityMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); HttpJsonCallSettings<ListLocationsRequest, ListLocationsResponse> listLocationsTransportSettings = HttpJsonCallSettings.<ListLocationsRequest, ListLocationsResponse>newBuilder() 
.setMethodDescriptor(listLocationsMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); HttpJsonCallSettings<GetLocationRequest, Location> getLocationTransportSettings = HttpJsonCallSettings.<GetLocationRequest, Location>newBuilder() .setMethodDescriptor(getLocationMethodDescriptor) .setTypeRegistry(typeRegistry) .setParamsExtractor( request -> { RequestParamsBuilder builder = RequestParamsBuilder.create(); builder.add("name", String.valueOf(request.getName())); return builder.build(); }) .build(); this.listInstancesCallable = callableFactory.createUnaryCallable( listInstancesTransportSettings, settings.listInstancesSettings(), clientContext); this.listInstancesPagedCallable = callableFactory.createPagedCallable( listInstancesTransportSettings, settings.listInstancesSettings(), clientContext); this.getInstanceCallable = callableFactory.createUnaryCallable( getInstanceTransportSettings, settings.getInstanceSettings(), clientContext); this.createInstanceCallable = callableFactory.createUnaryCallable( createInstanceTransportSettings, settings.createInstanceSettings(), clientContext); this.createInstanceOperationCallable = callableFactory.createOperationCallable( createInstanceTransportSettings, settings.createInstanceOperationSettings(), clientContext, httpJsonOperationsStub); this.updateInstanceCallable = callableFactory.createUnaryCallable( updateInstanceTransportSettings, settings.updateInstanceSettings(), clientContext); this.updateInstanceOperationCallable = callableFactory.createOperationCallable( updateInstanceTransportSettings, settings.updateInstanceOperationSettings(), clientContext, httpJsonOperationsStub); this.deleteInstanceCallable = callableFactory.createUnaryCallable( deleteInstanceTransportSettings, settings.deleteInstanceSettings(), clientContext); 
this.deleteInstanceOperationCallable = callableFactory.createOperationCallable( deleteInstanceTransportSettings, settings.deleteInstanceOperationSettings(), clientContext, httpJsonOperationsStub); this.getCertificateAuthorityCallable = callableFactory.createUnaryCallable( getCertificateAuthorityTransportSettings, settings.getCertificateAuthoritySettings(), clientContext); this.listLocationsCallable = callableFactory.createUnaryCallable( listLocationsTransportSettings, settings.listLocationsSettings(), clientContext); this.listLocationsPagedCallable = callableFactory.createPagedCallable( listLocationsTransportSettings, settings.listLocationsSettings(), clientContext); this.getLocationCallable = callableFactory.createUnaryCallable( getLocationTransportSettings, settings.getLocationSettings(), clientContext); this.backgroundResources = new BackgroundResourceAggregation(clientContext.getBackgroundResources()); } @InternalApi public static List<ApiMethodDescriptor> getMethodDescriptors() { List<ApiMethodDescriptor> methodDescriptors = new ArrayList<>(); methodDescriptors.add(listInstancesMethodDescriptor); methodDescriptors.add(getInstanceMethodDescriptor); methodDescriptors.add(createInstanceMethodDescriptor); methodDescriptors.add(updateInstanceMethodDescriptor); methodDescriptors.add(deleteInstanceMethodDescriptor); methodDescriptors.add(getCertificateAuthorityMethodDescriptor); methodDescriptors.add(listLocationsMethodDescriptor); methodDescriptors.add(getLocationMethodDescriptor); return methodDescriptors; } public HttpJsonOperationsStub getHttpJsonOperationsStub() { return httpJsonOperationsStub; } @Override public UnaryCallable<ListInstancesRequest, ListInstancesResponse> listInstancesCallable() { return listInstancesCallable; } @Override public UnaryCallable<ListInstancesRequest, ListInstancesPagedResponse> listInstancesPagedCallable() { return listInstancesPagedCallable; } @Override public UnaryCallable<GetInstanceRequest, Instance> getInstanceCallable() { 
return getInstanceCallable; } @Override public UnaryCallable<CreateInstanceRequest, Operation> createInstanceCallable() { return createInstanceCallable; } @Override public OperationCallable<CreateInstanceRequest, Instance, OperationMetadata> createInstanceOperationCallable() { return createInstanceOperationCallable; } @Override public UnaryCallable<UpdateInstanceRequest, Operation> updateInstanceCallable() { return updateInstanceCallable; } @Override public OperationCallable<UpdateInstanceRequest, Instance, OperationMetadata> updateInstanceOperationCallable() { return updateInstanceOperationCallable; } @Override public UnaryCallable<DeleteInstanceRequest, Operation> deleteInstanceCallable() { return deleteInstanceCallable; } @Override public OperationCallable<DeleteInstanceRequest, Empty, OperationMetadata> deleteInstanceOperationCallable() { return deleteInstanceOperationCallable; } @Override public UnaryCallable<GetCertificateAuthorityRequest, CertificateAuthority> getCertificateAuthorityCallable() { return getCertificateAuthorityCallable; } @Override public UnaryCallable<ListLocationsRequest, ListLocationsResponse> listLocationsCallable() { return listLocationsCallable; } @Override public UnaryCallable<ListLocationsRequest, ListLocationsPagedResponse> listLocationsPagedCallable() { return listLocationsPagedCallable; } @Override public UnaryCallable<GetLocationRequest, Location> getLocationCallable() { return getLocationCallable; } @Override public final void close() { try { backgroundResources.close(); } catch (RuntimeException e) { throw e; } catch (Exception e) { throw new IllegalStateException("Failed to close resource", e); } } @Override public void shutdown() { backgroundResources.shutdown(); } @Override public boolean isShutdown() { return backgroundResources.isShutdown(); } @Override public boolean isTerminated() { return backgroundResources.isTerminated(); } @Override public void shutdownNow() { backgroundResources.shutdownNow(); } @Override public 
boolean awaitTermination(long duration, TimeUnit unit) throws InterruptedException { return backgroundResources.awaitTermination(duration, unit); } }
apache/pulsar
35,963
pulsar-broker/src/main/java/org/apache/pulsar/broker/admin/v2/NonPersistentTopics.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.pulsar.broker.admin.v2; import io.swagger.annotations.Api; import io.swagger.annotations.ApiOperation; import io.swagger.annotations.ApiParam; import io.swagger.annotations.ApiResponse; import io.swagger.annotations.ApiResponses; import java.util.ArrayList; import java.util.Collections; import java.util.List; import java.util.concurrent.CompletableFuture; import java.util.concurrent.ExecutionException; import java.util.concurrent.TimeUnit; import java.util.concurrent.TimeoutException; import java.util.stream.Collectors; import javax.ws.rs.DELETE; import javax.ws.rs.DefaultValue; import javax.ws.rs.Encoded; import javax.ws.rs.GET; import javax.ws.rs.POST; import javax.ws.rs.PUT; import javax.ws.rs.Path; import javax.ws.rs.PathParam; import javax.ws.rs.Produces; import javax.ws.rs.QueryParam; import javax.ws.rs.WebApplicationException; import javax.ws.rs.container.AsyncResponse; import javax.ws.rs.container.Suspended; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import org.apache.commons.lang3.StringUtils; import org.apache.pulsar.broker.PulsarServerException; import org.apache.pulsar.broker.service.Topic; import 
org.apache.pulsar.broker.web.RestException; import org.apache.pulsar.common.naming.TopicName; import org.apache.pulsar.common.partition.PartitionedTopicMetadata; import org.apache.pulsar.common.policies.data.EntryFilters; import org.apache.pulsar.common.policies.data.NamespaceOperation; import org.apache.pulsar.common.policies.data.PersistentTopicInternalStats; import org.apache.pulsar.common.policies.data.Policies; import org.apache.pulsar.common.policies.data.TopicOperation; import org.apache.pulsar.common.policies.data.TopicStats; import org.apache.pulsar.common.policies.data.stats.NonPersistentPartitionedTopicStatsImpl; import org.apache.pulsar.common.policies.data.stats.NonPersistentTopicStatsImpl; import org.apache.pulsar.common.util.FutureUtil; import org.slf4j.Logger; import org.slf4j.LoggerFactory; /** */ @Path("/non-persistent") @Produces(MediaType.APPLICATION_JSON) @Api(value = "/non-persistent", description = "Non-Persistent topic admin apis", tags = "non-persistent topic") public class NonPersistentTopics extends PersistentTopics { private static final Logger log = LoggerFactory.getLogger(NonPersistentTopics.class); @GET @Path("/{tenant}/{namespace}/{topic}/partitions") @ApiOperation(value = "Get partitioned topic metadata.", response = PartitionedTopicMetadata.class) @ApiResponses(value = { @ApiResponse(code = 307, message = "Current broker doesn't serve the namespace of this topic"), @ApiResponse(code = 401, message = "Don't have permission to manage resources on this tenant"), @ApiResponse(code = 403, message = "Don't have admin permission"), @ApiResponse(code = 404, message = "The tenant/namespace/topic does not exist"), @ApiResponse(code = 412, message = "Topic name is not valid"), @ApiResponse(code = 500, message = "Internal server error"), @ApiResponse(code = 503, message = "Failed to validate cluster configuration") }) public void getPartitionedMetadata( @Suspended final AsyncResponse asyncResponse, @ApiParam(value = "Specify the tenant", 
required = true) @PathParam("tenant") String tenant, @ApiParam(value = "Specify the namespace", required = true) @PathParam("namespace") String namespace, @ApiParam(value = "Specify topic name", required = true) @PathParam("topic") @Encoded String encodedTopic, @ApiParam(value = "Whether leader broker redirected this call to this broker. For internal use.") @QueryParam("authoritative") @DefaultValue("false") boolean authoritative, @ApiParam(value = "Is check configuration required to automatically create topic") @QueryParam("checkAllowAutoCreation") @DefaultValue("false") boolean checkAllowAutoCreation) { validateTopicName(tenant, namespace, encodedTopic); validateTopicOwnershipAsync(topicName, authoritative).whenComplete((__, ex) -> { if (ex != null) { Throwable actEx = FutureUtil.unwrapCompletionException(ex); if (isNot307And404Exception(actEx)) { log.error("[{}] Failed to get internal stats for topic {}", clientAppId(), topicName, ex); } resumeAsyncResponseExceptionally(asyncResponse, actEx); } else { // "super.getPartitionedMetadata" will handle error itself. 
super.getPartitionedMetadata(asyncResponse, tenant, namespace, encodedTopic, authoritative, checkAllowAutoCreation); } }); } @GET @Path("{tenant}/{namespace}/{topic}/internalStats") @ApiOperation(value = "Get the internal stats for the topic.", response = PersistentTopicInternalStats.class) @ApiResponses(value = { @ApiResponse(code = 307, message = "Current broker doesn't serve the namespace of this topic"), @ApiResponse(code = 401, message = "Don't have permission to manage resources on this tenant"), @ApiResponse(code = 403, message = "Don't have admin permission"), @ApiResponse(code = 404, message = "The tenant/namespace/topic does not exist"), @ApiResponse(code = 412, message = "Topic name is not valid"), @ApiResponse(code = 500, message = "Internal server error"), }) public void getInternalStats( @Suspended final AsyncResponse asyncResponse, @ApiParam(value = "Specify the tenant", required = true) @PathParam("tenant") String tenant, @ApiParam(value = "Specify the namespace", required = true) @PathParam("namespace") String namespace, @ApiParam(value = "Specify topic name", required = true) @PathParam("topic") @Encoded String encodedTopic, @ApiParam(value = "Whether leader broker redirected this call to this broker. 
For internal use.") @QueryParam("authoritative") @DefaultValue("false") boolean authoritative, @QueryParam("metadata") @DefaultValue("false") boolean metadata) { validateTopicName(tenant, namespace, encodedTopic); validateTopicOwnershipAsync(topicName, authoritative) .thenCompose(__ -> validateTopicOperationAsync(topicName, TopicOperation.GET_STATS)) .thenCompose(__ -> { Topic topic = getTopicReference(topicName); boolean includeMetadata = metadata && hasSuperUserAccess(); return topic.getInternalStats(includeMetadata); }) .thenAccept(asyncResponse::resume) .exceptionally(ex -> { if (isNot307And404Exception(ex)) { log.error("[{}] Failed to get internal stats for topic {}", clientAppId(), topicName, ex); } resumeAsyncResponseExceptionally(asyncResponse, ex); return null; }); } @PUT @Path("/{tenant}/{namespace}/{topic}/partitions") @ApiOperation(value = "Create a partitioned topic.", notes = "It needs to be called before creating a producer on a partitioned topic.") @ApiResponses(value = { @ApiResponse(code = 204, message = "Operation successful"), @ApiResponse(code = 307, message = "Current broker doesn't serve the namespace of this topic"), @ApiResponse(code = 401, message = "Don't have permission to manage resources on this tenant"), @ApiResponse(code = 403, message = "Don't have admin permission"), @ApiResponse(code = 404, message = "The tenant/namespace does not exist"), @ApiResponse(code = 406, message = "The number of partitions should be more than 0 and less than" + " or equal to maxNumPartitionsPerPartitionedTopic"), @ApiResponse(code = 409, message = "Partitioned topic already exists"), @ApiResponse(code = 412, message = "Failed Reason : Name is invalid or " + "Namespace does not have any clusters configured"), @ApiResponse(code = 500, message = "Internal server error"), @ApiResponse(code = 503, message = "Failed to validate global cluster configuration"), }) public void createPartitionedTopic( @Suspended final AsyncResponse asyncResponse, @ApiParam(value = 
"Specify the tenant", required = true) @PathParam("tenant") String tenant, @ApiParam(value = "Specify the namespace", required = true) @PathParam("namespace") String namespace, @ApiParam(value = "Specify topic name", required = true) @PathParam("topic") @Encoded String encodedTopic, @ApiParam(value = "The number of partitions for the topic", required = true, type = "int", defaultValue = "0") int numPartitions, @QueryParam("createLocalTopicOnly") @DefaultValue("false") boolean createLocalTopicOnly) { try { validateNamespaceName(tenant, namespace); validateGlobalNamespaceOwnership(); validateTopicName(tenant, namespace, encodedTopic); internalCreatePartitionedTopic(asyncResponse, numPartitions, createLocalTopicOnly); } catch (Exception e) { log.error("[{}] Failed to create partitioned topic {}", clientAppId(), topicName, e); resumeAsyncResponseExceptionally(asyncResponse, e); } } @GET @Path("{tenant}/{namespace}/{topic}/partitioned-stats") @ApiOperation( value = "Get the stats for the partitioned topic.", response = NonPersistentPartitionedTopicStatsImpl.class ) @ApiResponses(value = { @ApiResponse(code = 307, message = "Current broker doesn't serve the namespace of this topic"), @ApiResponse(code = 401, message = "Don't have permission to administrate resources on this tenant"), @ApiResponse(code = 403, message = "Don't have admin permission"), @ApiResponse(code = 404, message = "Namespace or topic does not exist"), @ApiResponse(code = 412, message = "Partitioned topic name is invalid"), @ApiResponse(code = 500, message = "Internal server error"), @ApiResponse(code = 503, message = "Failed to validate global cluster configuration") }) public void getPartitionedStats( @Suspended final AsyncResponse asyncResponse, @ApiParam(value = "Specify the tenant", required = true) @PathParam("tenant") String tenant, @ApiParam(value = "Specify the namespace", required = true) @PathParam("namespace") String namespace, @ApiParam(value = "Specify topic name", required = true) 
@PathParam("topic") @Encoded String encodedTopic, @ApiParam(value = "Get per partition stats") @QueryParam("perPartition") @DefaultValue("true") boolean perPartition, @ApiParam(value = "Whether leader broker redirected this call to this broker. For internal use.") @QueryParam("authoritative") @DefaultValue("false") boolean authoritative, @ApiParam(value = "If return precise backlog or imprecise backlog") @QueryParam("getPreciseBacklog") @DefaultValue("false") boolean getPreciseBacklog, @ApiParam(value = "If return backlog size for each subscription, require locking on ledger so be careful " + "not to use when there's heavy traffic.") @QueryParam("subscriptionBacklogSize") @DefaultValue("false") boolean subscriptionBacklogSize, @ApiParam(value = "If return the earliest time in backlog") @QueryParam("getEarliestTimeInBacklog") @DefaultValue("false") boolean getEarliestTimeInBacklog, @ApiParam(value = "If exclude the publishers") @QueryParam("excludePublishers") @DefaultValue("false") boolean excludePublishers, @ApiParam(value = "If exclude the consumers") @QueryParam("excludeConsumers") @DefaultValue("false") boolean excludeConsumers) { try { validateTopicName(tenant, namespace, encodedTopic); if (topicName.isPartitioned()) { throw new RestException(Response.Status.PRECONDITION_FAILED, "Partitioned Topic Name should not contain '-partition-'"); } if (topicName.isGlobal()) { try { validateGlobalNamespaceOwnership(namespaceName); } catch (Exception e) { log.error("[{}] Failed to get partitioned stats for {}", clientAppId(), topicName, e); resumeAsyncResponseExceptionally(asyncResponse, e); return; } } getPartitionedTopicMetadataAsync(topicName, authoritative, false).thenAccept(partitionMetadata -> { if (partitionMetadata.partitions == 0) { asyncResponse.resume(new RestException(Status.NOT_FOUND, String.format("Partitioned topic not found %s", topicName.toString()))); return; } NonPersistentPartitionedTopicStatsImpl stats = new 
NonPersistentPartitionedTopicStatsImpl(partitionMetadata); List<CompletableFuture<TopicStats>> topicStatsFutureList = new ArrayList<>(); org.apache.pulsar.client.admin.GetStatsOptions statsOptions = new org.apache.pulsar.client.admin.GetStatsOptions( getPreciseBacklog, subscriptionBacklogSize, getEarliestTimeInBacklog, excludePublishers, excludeConsumers ); for (int i = 0; i < partitionMetadata.partitions; i++) { try { topicStatsFutureList .add(pulsar().getAdminClient().topics().getStatsAsync( (topicName.getPartition(i).toString()), statsOptions)); } catch (PulsarServerException e) { asyncResponse.resume(new RestException(e)); return; } } FutureUtil.waitForAll(topicStatsFutureList).handle((result, exception) -> { CompletableFuture<TopicStats> statFuture = null; for (int i = 0; i < topicStatsFutureList.size(); i++) { statFuture = topicStatsFutureList.get(i); if (statFuture.isDone() && !statFuture.isCompletedExceptionally()) { try { stats.add((NonPersistentTopicStatsImpl) statFuture.get()); if (perPartition) { stats.getPartitions().put(topicName.getPartition(i).toString(), (NonPersistentTopicStatsImpl) statFuture.get()); } } catch (Exception e) { asyncResponse.resume(new RestException(e)); return null; } } } if (perPartition && stats.partitions.isEmpty()) { try { boolean topicExists = namespaceResources().getPartitionedTopicResources() .partitionedTopicExists(topicName); if (topicExists) { stats.getPartitions().put(topicName.toString(), new NonPersistentTopicStatsImpl()); } else { asyncResponse.resume( new RestException(Status.NOT_FOUND, "Internal topics have not been generated yet")); return null; } } catch (Exception e) { asyncResponse.resume(new RestException(e)); return null; } } asyncResponse.resume(stats); return null; }); }).exceptionally(ex -> { log.error("[{}] Failed to get partitioned stats for {}", clientAppId(), topicName, ex); resumeAsyncResponseExceptionally(asyncResponse, ex); return null; }); } catch (WebApplicationException wae) { 
asyncResponse.resume(wae); } catch (Exception e) { asyncResponse.resume(new RestException(e)); } } @PUT @Path("/{tenant}/{namespace}/{topic}/unload") @ApiOperation(value = "Unload a topic") @ApiResponses(value = { @ApiResponse(code = 204, message = "Operation successful"), @ApiResponse(code = 307, message = "Current broker doesn't serve the namespace of this topic"), @ApiResponse(code = 401, message = "This operation requires super-user access"), @ApiResponse(code = 403, message = "Don't have admin permission"), @ApiResponse(code = 404, message = "The tenant/namespace/topic does not exist"), @ApiResponse(code = 412, message = "Topic name is not valid"), @ApiResponse(code = 500, message = "Internal server error"), @ApiResponse(code = 503, message = "Failed to validate global cluster configuration"), }) public void unloadTopic( @Suspended final AsyncResponse asyncResponse, @ApiParam(value = "Specify the tenant", required = true) @PathParam("tenant") String tenant, @ApiParam(value = "Specify the namespace", required = true) @PathParam("namespace") String namespace, @ApiParam(value = "Specify topic name", required = true) @PathParam("topic") @Encoded String encodedTopic, @ApiParam(value = "Whether leader broker redirected this call to this broker. 
For internal use.") @QueryParam("authoritative") @DefaultValue("false") boolean authoritative) { try { validateTopicName(tenant, namespace, encodedTopic); internalUnloadTopic(asyncResponse, authoritative); } catch (WebApplicationException wae) { asyncResponse.resume(wae); } catch (Exception e) { asyncResponse.resume(new RestException(e)); } } @GET @Path("/{tenant}/{namespace}") @ApiOperation(value = "Get the list of non-persistent topics under a namespace.", response = String.class, responseContainer = "List") @ApiResponses(value = { @ApiResponse(code = 401, message = "Don't have permission to manage resources on this tenant"), @ApiResponse(code = 403, message = "Don't have admin permission"), @ApiResponse(code = 404, message = "The tenant/namespace does not exist"), @ApiResponse(code = 412, message = "Namespace name is not valid"), @ApiResponse(code = 500, message = "Internal server error"), @ApiResponse(code = 503, message = "Failed to validate global cluster configuration"), }) public void getList( @Suspended final AsyncResponse asyncResponse, @ApiParam(value = "Specify the tenant", required = true) @PathParam("tenant") String tenant, @ApiParam(value = "Specify the namespace", required = true) @PathParam("namespace") String namespace, @ApiParam(value = "Specify the bundle name", required = false) @QueryParam("bundle") String nsBundle, @ApiParam(value = "Include system topic") @QueryParam("includeSystemTopic") boolean includeSystemTopic) { Policies policies = null; try { validateNamespaceName(tenant, namespace); if (log.isDebugEnabled()) { log.debug("[{}] list of topics on namespace {}", clientAppId(), namespaceName); } validateNamespaceOperation(namespaceName, NamespaceOperation.GET_TOPICS); policies = getNamespacePolicies(namespaceName); // check cluster ownership for a given global namespace: redirect if peer-cluster owns it validateGlobalNamespaceOwnership(namespaceName); } catch (WebApplicationException wae) { asyncResponse.resume(wae); return; } catch 
(Exception e) { asyncResponse.resume(new RestException(e)); return; } final List<CompletableFuture<List<String>>> futures = new ArrayList<>(); final List<String> boundaries = policies.bundles.getBoundaries(); for (int i = 0; i < boundaries.size() - 1; i++) { final String bundle = String.format("%s_%s", boundaries.get(i), boundaries.get(i + 1)); if (StringUtils.isNotBlank(nsBundle) && !nsBundle.equals(bundle)) { continue; } try { futures.add(pulsar().getAdminClient().topics().getListInBundleAsync(namespaceName.toString(), bundle)); } catch (PulsarServerException e) { log.error("[{}] Failed to get list of topics under namespace {}/{}", clientAppId(), namespaceName, bundle, e); asyncResponse.resume(new RestException(e)); return; } } FutureUtil.waitForAll(futures).whenComplete((result, ex) -> { if (ex != null) { resumeAsyncResponseExceptionally(asyncResponse, ex); } else { final List<String> topics = new ArrayList<>(); for (int i = 0; i < futures.size(); i++) { List<String> topicList = futures.get(i).join(); if (topicList != null) { topics.addAll(topicList); } } final List<String> nonPersistentTopics = topics.stream() .filter(name -> !TopicName.get(name).isPersistent()) .collect(Collectors.toList()); asyncResponse.resume(filterSystemTopic(nonPersistentTopics, includeSystemTopic)); } }); } @GET @Path("/{tenant}/{namespace}/{bundle}") @ApiOperation(value = "Get the list of non-persistent topics under a namespace bundle.", response = String.class, responseContainer = "List") @ApiResponses(value = { @ApiResponse(code = 401, message = "Don't have permission to manage resources on this tenant"), @ApiResponse(code = 403, message = "Don't have admin permission"), @ApiResponse(code = 404, message = "Namespace doesn't exist"), @ApiResponse(code = 412, message = "Namespace name is not valid"), @ApiResponse(code = 500, message = "Internal server error"), @ApiResponse(code = 503, message = "Failed to validate global cluster configuration"), }) public void getListFromBundle( 
@Suspended final AsyncResponse asyncResponse, @ApiParam(value = "Specify the tenant", required = true) @PathParam("tenant") String tenant, @ApiParam(value = "Specify the namespace", required = true) @PathParam("namespace") String namespace, @ApiParam(value = "Bundle range of a topic", required = true) @PathParam("bundle") String bundleRange) { validateNamespaceName(tenant, namespace); if (log.isDebugEnabled()) { log.debug("[{}] list of topics on namespace bundle {}/{}", clientAppId(), namespaceName, bundleRange); } validateNamespaceOperation(namespaceName, NamespaceOperation.GET_BUNDLE); Policies policies = getNamespacePolicies(namespaceName); // check cluster ownership for a given global namespace: redirect if peer-cluster owns it validateGlobalNamespaceOwnership(namespaceName); isBundleOwnedByAnyBroker(namespaceName, policies.bundles, bundleRange).thenAccept(flag -> { if (!flag) { log.info("[{}] Namespace bundle is not owned by any broker {}/{}", clientAppId(), namespaceName, bundleRange); asyncResponse.resume(Response.noContent().build()); } else { validateNamespaceBundleOwnershipAsync(namespaceName, policies.bundles, bundleRange, true, true) .thenAccept(nsBundle -> { final var bundleTopics = pulsar().getBrokerService().getMultiLayerTopicsMap() .get(namespaceName.toString()); if (bundleTopics == null || bundleTopics.isEmpty()) { asyncResponse.resume(Collections.emptyList()); return; } final List<String> topicList = new ArrayList<>(); String bundleKey = namespaceName.toString() + "/" + nsBundle.getBundleRange(); final var topicMap = bundleTopics.get(bundleKey); if (topicMap != null) { topicList.addAll(topicMap.keySet().stream() .filter(name -> !TopicName.get(name).isPersistent()) .collect(Collectors.toList())); } asyncResponse.resume(topicList); }).exceptionally(ex -> { if (isNot307And404Exception(ex)) { log.error("[{}] Failed to list topics on namespace bundle {}/{}", clientAppId(), namespaceName, bundleRange, ex); } 
resumeAsyncResponseExceptionally(asyncResponse, ex); return null; }); } }).exceptionally(ex -> { if (isNot307And404Exception(ex)) { log.error("[{}] Failed to list topics on namespace bundle {}/{}", clientAppId(), namespaceName, bundleRange, ex); } resumeAsyncResponseExceptionally(asyncResponse, ex); return null; }); } @DELETE @Path("/{tenant}/{namespace}/{topic}/truncate") @ApiOperation(value = "Truncate a topic.", notes = "NonPersistentTopic does not support truncate.") @ApiResponses(value = { @ApiResponse(code = 204, message = "Operation successful"), @ApiResponse(code = 412, message = "NonPersistentTopic does not support truncate.") }) public void truncateTopic( @Suspended final AsyncResponse asyncResponse, @ApiParam(value = "Specify the tenant", required = true) @PathParam("tenant") String tenant, @ApiParam(value = "Specify the namespace", required = true) @PathParam("namespace") String namespace, @ApiParam(value = "Specify topic name", required = true) @PathParam("topic") @Encoded String encodedTopic, @ApiParam(value = "Whether leader broker redirected this call to this broker. 
For internal use.") @QueryParam("authoritative") @DefaultValue("false") boolean authoritative){ asyncResponse.resume(new RestException(Status.PRECONDITION_FAILED.getStatusCode(), "unsupport truncate")); } protected void validateAdminOperationOnTopic(TopicName topicName, boolean authoritative) { validateAdminAccessForTenant(topicName.getTenant()); validateTopicOwnership(topicName, authoritative); } @GET @Path("/{tenant}/{namespace}/{topic}/entryFilters") @ApiOperation(value = "Get entry filters for a topic.", response = EntryFilters.class) @ApiResponses(value = { @ApiResponse(code = 403, message = "Don't have admin permission"), @ApiResponse(code = 404, message = "Tenants or Namespace doesn't exist") }) public void getEntryFilters(@Suspended AsyncResponse asyncResponse, @ApiParam(value = "Specify the tenant", required = true) @PathParam("tenant") String tenant, @ApiParam(value = "Specify the namespace", required = true) @PathParam("namespace") String namespace, @ApiParam(value = "Specify topic name", required = true) @PathParam("topic") @Encoded String encodedTopic, @QueryParam("applied") @DefaultValue("false") boolean applied, @QueryParam("isGlobal") @DefaultValue("false") boolean isGlobal, @ApiParam(value = "Whether leader broker redirected this call to this " + "broker. 
For internal use.") @QueryParam("authoritative") @DefaultValue("false") boolean authoritative) { validateTopicName(tenant, namespace, encodedTopic); preValidation(authoritative) .thenCompose(__ -> internalGetEntryFilters(applied, isGlobal)) .thenAccept(asyncResponse::resume) .exceptionally(ex -> { handleTopicPolicyException("getEntryFilters", ex, asyncResponse); return null; }); } @POST @Path("/{tenant}/{namespace}/{topic}/entryFilters") @ApiOperation(value = "Set entry filters for specified topic") @ApiResponses(value = { @ApiResponse(code = 204, message = "Operation successful"), @ApiResponse(code = 403, message = "Don't have admin permission"), @ApiResponse(code = 404, message = "Tenant or namespace or topic doesn't exist"), @ApiResponse(code = 405, message = "Topic level policy is disabled, please enable the topic level policy and retry"), @ApiResponse(code = 409, message = "Concurrent modification")}) public void setEntryFilters(@Suspended final AsyncResponse asyncResponse, @PathParam("tenant") String tenant, @PathParam("namespace") String namespace, @PathParam("topic") @Encoded String encodedTopic, @QueryParam("isGlobal") @DefaultValue("false") boolean isGlobal, @ApiParam(value = "Whether leader broker redirected this " + "call to this broker. 
For internal use.") @QueryParam("authoritative") @DefaultValue("false") boolean authoritative, @ApiParam(value = "Enable sub types for the specified topic") EntryFilters entryFilters) { validateTopicName(tenant, namespace, encodedTopic); preValidation(authoritative) .thenCompose(__ -> internalSetEntryFilters(entryFilters, isGlobal)) .thenAccept(__ -> asyncResponse.resume(Response.noContent().build())) .exceptionally(ex -> { handleTopicPolicyException("setEntryFilters", ex, asyncResponse); return null; }); } @DELETE @Path("/{tenant}/{namespace}/{topic}/entryFilters") @ApiOperation(value = "Remove entry filters for specified topic.") @ApiResponses(value = { @ApiResponse(code = 204, message = "Operation successful"), @ApiResponse(code = 403, message = "Don't have admin permission"), @ApiResponse(code = 404, message = "Tenant or namespace or topic doesn't exist"), @ApiResponse(code = 405, message = "Topic level policy is disabled, please enable the topic level policy and retry"), @ApiResponse(code = 409, message = "Concurrent modification")}) public void removeEntryFilters(@Suspended final AsyncResponse asyncResponse, @PathParam("tenant") String tenant, @PathParam("namespace") String namespace, @PathParam("topic") @Encoded String encodedTopic, @QueryParam("isGlobal") @DefaultValue("false") boolean isGlobal, @ApiParam(value = "Whether leader broker redirected this" + "call to this broker. 
For internal use.") @QueryParam("authoritative") @DefaultValue("false") boolean authoritative) { validateTopicName(tenant, namespace, encodedTopic); preValidation(authoritative) .thenCompose(__ -> internalRemoveEntryFilters(isGlobal)) .thenRun(() -> { log.info( "[{}] Successfully remove entry filters: tenant={}, namespace={}, topic={}, isGlobal={}", clientAppId(), tenant, namespace, topicName.getLocalName(), isGlobal); asyncResponse.resume(Response.noContent().build()); }) .exceptionally(ex -> { handleTopicPolicyException("removeEntryFilters", ex, asyncResponse); return null; }); } private Topic getTopicReference(TopicName topicName) { try { return pulsar().getBrokerService().getTopicIfExists(topicName.toString()) .get(config().getMetadataStoreOperationTimeoutSeconds(), TimeUnit.SECONDS) .orElseThrow(() -> new RestException(Status.NOT_FOUND, "Topic not found")); } catch (ExecutionException e) { throw new RestException(e.getCause()); } catch (InterruptedException | TimeoutException e) { throw new RestException(e); } } }
hibernate/hibernate-search
33,446
integrationtest/backend/tck/src/main/java/org/hibernate/search/integrationtest/backend/tck/search/projection/SearchProjectionIT.java
/* * SPDX-License-Identifier: Apache-2.0 * Copyright Red Hat Inc. and Hibernate Authors */ package org.hibernate.search.integrationtest.backend.tck.search.projection; import static org.assertj.core.api.Assertions.assertThat; import static org.assertj.core.api.Assertions.assertThatCode; import static org.assertj.core.api.Assertions.assertThatThrownBy; import static org.hibernate.search.integrationtest.backend.tck.testsupport.stub.MapperMockUtils.expectHitMapping; import static org.hibernate.search.util.impl.integrationtest.common.NormalizationUtils.reference; import static org.hibernate.search.util.impl.integrationtest.common.assertion.SearchResultAssert.assertThatQuery; import static org.hibernate.search.util.impl.integrationtest.common.assertion.SearchResultAssert.assertThatResult; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.when; import java.util.ArrayList; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.function.Function; import org.hibernate.search.engine.backend.common.DocumentReference; import org.hibernate.search.engine.backend.document.DocumentElement; import org.hibernate.search.engine.backend.document.IndexFieldReference; import org.hibernate.search.engine.backend.document.IndexObjectFieldReference; import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaElement; import org.hibernate.search.engine.backend.document.model.dsl.IndexSchemaObjectField; import org.hibernate.search.engine.backend.types.ObjectStructure; import org.hibernate.search.engine.backend.types.Projectable; import org.hibernate.search.engine.backend.types.dsl.IndexFieldTypeFactory; import org.hibernate.search.engine.backend.types.dsl.StandardIndexFieldTypeOptionsStep; import org.hibernate.search.engine.common.EntityReference; import org.hibernate.search.engine.search.loading.spi.SearchLoadingContext; import org.hibernate.search.engine.search.projection.SearchProjection; import 
org.hibernate.search.engine.search.projection.dsl.ProjectionFinalStep; import org.hibernate.search.engine.search.projection.dsl.SearchProjectionFactory; import org.hibernate.search.engine.search.projection.dsl.SearchProjectionFactoryExtension; import org.hibernate.search.engine.search.projection.spi.ProjectionMappedTypeContext; import org.hibernate.search.engine.search.query.SearchQuery; import org.hibernate.search.engine.search.query.SearchResult; import org.hibernate.search.integrationtest.backend.tck.testsupport.configuration.DefaultAnalysisDefinitions; import org.hibernate.search.integrationtest.backend.tck.testsupport.stub.StubEntity; import org.hibernate.search.integrationtest.backend.tck.testsupport.util.SimpleFieldMapper; import org.hibernate.search.integrationtest.backend.tck.testsupport.util.extension.SearchSetupHelper; import org.hibernate.search.util.common.SearchException; import org.hibernate.search.util.impl.integrationtest.mapper.stub.GenericStubMappingScope; import org.hibernate.search.util.impl.integrationtest.mapper.stub.SimpleMappedIndex; import org.hibernate.search.util.impl.integrationtest.mapper.stub.StubMappingScope; import org.hibernate.search.util.impl.test.annotation.TestForIssue; import org.junit.jupiter.api.BeforeEach; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.extension.RegisterExtension; import org.assertj.core.api.ThrowableAssert; import org.mockito.Mockito; import org.mockito.junit.jupiter.MockitoSettings; import org.mockito.quality.Strictness; /** * Generic tests for projections. More specific tests can be found in other classes, such as {@link FieldProjectionSingleValuedBaseIT}. 
*/ @MockitoSettings(strictness = Strictness.STRICT_STUBS) @SuppressWarnings("unchecked") // Mocking parameterized types class SearchProjectionIT { private static final String DOCUMENT_1 = "1"; private static final String DOCUMENT_2 = "2"; private static final String DOCUMENT_3 = "3"; private static final String EMPTY = "empty"; private static final ProjectionMappedTypeContext mainTypeContextMock = Mockito.mock( ProjectionMappedTypeContext.class ); @RegisterExtension public final SearchSetupHelper setupHelper = SearchSetupHelper.create(); private final SimpleMappedIndex<IndexBinding> mainIndex = SimpleMappedIndex.of( IndexBinding::new ).name( "main" ); private final SimpleMappedIndex<IndexBinding> otherIndex = // Using the same mapping here. But a different mapping would work the same. // What matters here is that is a different index. SimpleMappedIndex.of( IndexBinding::new ).name( "other" ); private final SimpleMappedIndex<IndexBinding> anotherIndex = // Using the same mapping here. But a different mapping would work the same. // What matters here is that is a different index. 
SimpleMappedIndex.of( IndexBinding::new ).name( "another" ); @BeforeEach void setup() { setupHelper.start().withIndexes( mainIndex, otherIndex, anotherIndex ).setup(); initData(); } @Test void noProjections() { StubMappingScope scope = mainIndex.createScope(); SearchQuery<List<?>> query = scope.query() .select() .where( f -> f.matchAll() ) .toQuery(); assertThatQuery( query ).hasTotalHitCount( 4 ); } @Test void references_noLoadingContext() { StubMappingScope scope = mainIndex.createScope(); SearchQuery<List<?>> query; DocumentReference document1Reference = reference( mainIndex.typeName(), DOCUMENT_1 ); DocumentReference document2Reference = reference( mainIndex.typeName(), DOCUMENT_2 ); DocumentReference document3Reference = reference( mainIndex.typeName(), DOCUMENT_3 ); DocumentReference emptyReference = reference( mainIndex.typeName(), EMPTY ); /* * Note to test writers: make sure to assign these projections to variables, * just so that tests do not compile if someone changes the APIs in an incorrect way. 
*/ SearchProjection<DocumentReference> documentReferenceProjection = scope.projection().documentReference().toProjection(); SearchProjection<Object> idProjection = scope.projection().id().toProjection(); query = scope.query() .select( documentReferenceProjection, idProjection ) .where( f -> f.matchAll() ) .toQuery(); assertThatQuery( query ).hasListHitsAnyOrder( b -> { b.list( document1Reference, DOCUMENT_1 ); b.list( document2Reference, DOCUMENT_2 ); b.list( document3Reference, DOCUMENT_3 ); b.list( emptyReference, EMPTY ); } ); } @Test @TestForIssue(jiraKey = "HSEARCH-3395") void references() { DocumentReference document1Reference = reference( mainIndex.typeName(), DOCUMENT_1 ); DocumentReference document2Reference = reference( mainIndex.typeName(), DOCUMENT_2 ); DocumentReference document3Reference = reference( mainIndex.typeName(), DOCUMENT_3 ); DocumentReference emptyReference = reference( mainIndex.typeName(), EMPTY ); EntityReference document1EntityReference = StubEntity.reference( document1Reference ); EntityReference document2EntityReference = StubEntity.reference( document2Reference ); EntityReference document3EntityReference = StubEntity.reference( document3Reference ); EntityReference emptyEntityReference = StubEntity.reference( emptyReference ); StubEntity document1LoadedEntity = new StubEntity( document1Reference ); StubEntity document2LoadedEntity = new StubEntity( document2Reference ); StubEntity document3LoadedEntity = new StubEntity( document3Reference ); StubEntity emptyLoadedEntity = new StubEntity( emptyReference ); SearchLoadingContext<StubEntity> loadingContextMock = mock( SearchLoadingContext.class ); when( mainTypeContextMock.loadingAvailable() ).thenReturn( true ); mainIndex.mapping().with() .typeContext( mainIndex.typeName(), mainTypeContextMock ) .run( () -> { GenericStubMappingScope<?, EntityReference, StubEntity> scope = mainIndex.createGenericScope( loadingContextMock ); SearchQuery<List<?>> query; /* * Note to test writers: make sure 
to assign these projections to variables, * just so that tests do not compile if someone changes the APIs in an incorrect way. */ SearchProjection<DocumentReference> documentReferenceProjection = scope.projection().documentReference().toProjection(); SearchProjection<EntityReference> entityReferenceProjection = scope.projection().entityReference().toProjection(); SearchProjection<StubEntity> entityProjection = scope.projection().entity().toProjection(); query = scope.query() .select( documentReferenceProjection, entityReferenceProjection, entityProjection ) .where( f -> f.matchAll() ) .toQuery(); expectHitMapping( loadingContextMock, /* * Expect each reference to be transformed because of the reference projection, * but also loaded because of the entity projection. */ c -> c .entityReference( document1Reference, document1EntityReference ) .load( document1Reference, document1LoadedEntity ) .entityReference( document2Reference, document2EntityReference ) .load( document2Reference, document2LoadedEntity ) .entityReference( document3Reference, document3EntityReference ) .load( document3Reference, document3LoadedEntity ) .entityReference( emptyReference, emptyEntityReference ) .load( emptyReference, emptyLoadedEntity ) ); assertThatQuery( query ).hasListHitsAnyOrder( b -> { b.list( document1Reference, document1EntityReference, document1LoadedEntity ); b.list( document2Reference, document2EntityReference, document2LoadedEntity ); b.list( document3Reference, document3EntityReference, document3LoadedEntity ); b.list( emptyReference, emptyEntityReference, emptyLoadedEntity ); } ); } ); } @Test void score() { StubMappingScope scope = mainIndex.createScope(); SearchQuery<Float> query = scope.query() .select( f -> f.score() ) .where( f -> f.match().field( mainIndex.binding().scoreField.relativeFieldName ).matching( "scorepattern" ) ) .sort( f -> f.score().desc() ) .toQuery(); SearchResult<Float> result = query.fetchAll(); assertThatResult( result ).hasTotalHitCount( 2 ); Float 
score1 = result.hits().get( 0 ); Float score2 = result.hits().get( 1 ); assertThat( score1 ).isNotNull().isNotNaN(); assertThat( score2 ).isNotNull().isNotNaN(); assertThat( score1 ).isGreaterThan( score2 ); } /** * Test projection on the score when we do not sort by score. */ @Test void score_noScoreSort() { StubMappingScope scope = mainIndex.createScope(); SearchQuery<Float> query = scope.query() .select( f -> f.score() ) .where( f -> f.match().field( mainIndex.binding().scoreField.relativeFieldName ).matching( "scorepattern" ) ) .sort( f -> f.indexOrder() ) .toQuery(); SearchResult<Float> result = query.fetchAll(); assertThatResult( result ).hasTotalHitCount( 2 ); Float score1 = result.hits().get( 0 ); Float score2 = result.hits().get( 1 ); assertThat( score1 ).isNotNull().isNotNaN(); assertThat( score2 ).isNotNull().isNotNaN(); } @Test void constant_nonNull() { StubMappingScope scope = mainIndex.createScope(); String constantValue = "foo"; assertThatQuery( scope.query() .select( f -> f.composite( f.id(), f.constant( "foo" ) ) ) .where( f -> f.matchAll() ) .sort( f -> f.score().desc() ) ) .hasListHitsAnyOrder( b -> { b.list( DOCUMENT_1, constantValue ); b.list( DOCUMENT_2, constantValue ); b.list( DOCUMENT_3, constantValue ); b.list( EMPTY, constantValue ); } ); } @Test void constant_null() { StubMappingScope scope = mainIndex.createScope(); assertThatQuery( scope.query() .select( f -> f.composite( f.id(), f.constant( null ) ) ) .where( f -> f.matchAll() ) .sort( f -> f.score().desc() ) ) .hasListHitsAnyOrder( b -> { b.list( DOCUMENT_1, (Object) null ); b.list( DOCUMENT_2, (Object) null ); b.list( DOCUMENT_3, (Object) null ); b.list( EMPTY, (Object) null ); } ); } @Test void constant_root() { StubMappingScope scope = mainIndex.createScope(); String constantValue = "foo"; assertThatQuery( scope.query() .select( f -> f.constant( "foo" ) ) .where( f -> f.matchAll() ) .sort( f -> f.score().desc() ) ) .hasHitsAnyOrder( constantValue, // Doc 1 constantValue, // Doc 2 
constantValue, // Doc 3 constantValue // Empty doc ); } /** * Test mixing multiple projection types (field projections, special projections, ...), * and also multiple field projections. */ @Test void mixed() { StubMappingScope scope = mainIndex.createScope(); SearchQuery<List<?>> query; query = scope.query() .select( f -> f.composite( f.field( mainIndex.binding().string1Field.relativeFieldName, String.class ), f.documentReference(), f.field( mainIndex.binding().string2Field.relativeFieldName, String.class ) ) ) .where( f -> f.matchAll() ) .toQuery(); assertThatQuery( query ).hasListHitsAnyOrder( b -> { b.list( mainIndex.binding().string1Field.document1Value.indexedValue, reference( mainIndex.typeName(), DOCUMENT_1 ), mainIndex.binding().string2Field.document1Value.indexedValue ); b.list( mainIndex.binding().string1Field.document2Value.indexedValue, reference( mainIndex.typeName(), DOCUMENT_2 ), mainIndex.binding().string2Field.document2Value.indexedValue ); b.list( mainIndex.binding().string1Field.document3Value.indexedValue, reference( mainIndex.typeName(), DOCUMENT_3 ), mainIndex.binding().string2Field.document3Value.indexedValue ); b.list( null, reference( mainIndex.typeName(), EMPTY ), null ); } ); } /** * Test mixing multiple projection types (field projections, special projections, ...), * and also multiple field projections, using nested fields too. */ @Test void mixed_withNestedFields() { StubMappingScope scope = mainIndex.createScope(); SearchQuery<List<?>> query; query = scope.query() .select( f -> f.composite( f.field( mainIndex.binding().string1Field.relativeFieldName, String.class ), f.documentReference(), f.field( "nested." + mainIndex.binding().nestedField.relativeFieldName, String.class ), f.field( "nested.nested." + mainIndex.binding().nestedNestedField.relativeFieldName, String.class ), f.field( "nested.flattened." 
+ mainIndex.binding().flattenedField.relativeFieldName, String.class ) ) ) .where( f -> f.matchAll() ) .toQuery(); assertThatQuery( query ).hasListHitsAnyOrder( b -> { b.list( mainIndex.binding().string1Field.document1Value.indexedValue, reference( mainIndex.typeName(), DOCUMENT_1 ), mainIndex.binding().nestedField.document1Value.indexedValue, mainIndex.binding().nestedNestedField.document1Value.indexedValue, mainIndex.binding().flattenedField.document1Value.indexedValue ); b.list( mainIndex.binding().string1Field.document2Value.indexedValue, reference( mainIndex.typeName(), DOCUMENT_2 ), mainIndex.binding().nestedField.document2Value.indexedValue, mainIndex.binding().nestedNestedField.document2Value.indexedValue, mainIndex.binding().flattenedField.document2Value.indexedValue ); b.list( mainIndex.binding().string1Field.document3Value.indexedValue, reference( mainIndex.typeName(), DOCUMENT_3 ), mainIndex.binding().nestedField.document3Value.indexedValue, mainIndex.binding().nestedNestedField.document3Value.indexedValue, mainIndex.binding().flattenedField.document3Value.indexedValue ); b.list( null, reference( mainIndex.typeName(), EMPTY ), null, null, null ); } ); } @Test void reuseProjectionInstance_onScopeTargetingSameIndexes() { StubMappingScope scope = mainIndex.createScope(); SearchProjection<String> projection = scope.projection() .field( mainIndex.binding().string1Field.relativeFieldName, String.class ).toProjection(); String value1 = mainIndex.binding().string1Field.document1Value.indexedValue; String value2 = mainIndex.binding().string1Field.document2Value.indexedValue; String value3 = mainIndex.binding().string1Field.document3Value.indexedValue; SearchQuery<String> query = scope.query() .select( projection ) .where( f -> f.matchAll() ) .toQuery(); assertThatQuery( query ).hasHitsAnyOrder( value1, value2, value3, null ); // reuse the same projection instance on the same scope query = scope.query() .select( projection ) .where( f -> f.matchAll() ) 
.toQuery(); assertThatQuery( query ).hasHitsAnyOrder( value1, value2, value3, null ); // reuse the same projection instance on a different scope, // targeting the same index query = mainIndex.createScope().query() .select( projection ) .where( f -> f.matchAll() ) .toQuery(); assertThatQuery( query ).hasHitsAnyOrder( value1, value2, value3, null ); projection = mainIndex.createScope( otherIndex ).projection() .field( mainIndex.binding().string1Field.relativeFieldName, String.class ).toProjection(); // reuse the same projection instance on a different scope, // targeting same indexes query = otherIndex.createScope( mainIndex ).query() .select( projection ) .where( f -> f.matchAll() ) .toQuery(); assertThatQuery( query ).hasHitsAnyOrder( value1, value2, value3, null ); } @Test void reuseProjectionInstance_onScopeTargetingDifferentIndexes() { StubMappingScope scope = mainIndex.createScope(); SearchProjection<String> projection = scope.projection() .field( mainIndex.binding().string1Field.relativeFieldName, String.class ).toProjection(); // reuse the same projection instance on a different scope, // targeting a different index assertThatThrownBy( () -> otherIndex.createScope().query() .select( projection ) .where( f -> f.matchAll() ) .toQuery() ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Invalid search projection", "You must build the projection from a scope targeting indexes ", otherIndex.name(), "the given projection was built from a scope targeting ", mainIndex.name() ); // reuse the same projection instance on a different scope, // targeting different indexes assertThatThrownBy( () -> mainIndex.createScope( otherIndex ).query() .select( projection ) .where( f -> f.matchAll() ) .toQuery() ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( "Invalid search projection", "You must build the projection from a scope targeting indexes ", mainIndex.name(), otherIndex.name(), "the given projection was built from a scope targeting ", 
mainIndex.name() ); assertFailScope( () -> mainIndex.createScope( otherIndex ).query() .select( projection ) .where( f -> f.matchAll() ) .toQuery(), Set.of( otherIndex.name() ), Set.of( mainIndex.name() ), Set.of( otherIndex.name() ) ); // reuse the same predicate instance on a different scope, // targeting different indexes assertFailScope( () -> mainIndex.createScope( otherIndex ).query() .select( projection ) .where( f -> f.matchAll() ) .toQuery(), Set.of( mainIndex.name(), otherIndex.name() ), Set.of( mainIndex.name() ), Set.of( otherIndex.name() ) ); assertFailScope( () -> otherIndex.createScope( mainIndex ).query() .select( projection ) .where( f -> f.matchAll() ) .toQuery(), Set.of( mainIndex.name(), otherIndex.name() ), Set.of( mainIndex.name() ), Set.of( otherIndex.name() ) ); scope = mainIndex.createScope( otherIndex ); SearchProjection<String> projection2 = scope.projection() .field( mainIndex.binding().string1Field.relativeFieldName, String.class ).toProjection(); assertThatCode( () -> mainIndex.createScope( otherIndex ).query() .select( projection2 ) .where( f -> f.matchAll() ) .toQuery() ) .doesNotThrowAnyException(); assertFailScope( () -> otherIndex.createScope( anotherIndex ).query() .select( projection2 ) .where( f -> f.matchAll() ) .toQuery(), Set.of( otherIndex.name(), anotherIndex.name() ), Set.of( mainIndex.name(), otherIndex.name() ), Set.of( mainIndex.name() ) ); assertFailScope( () -> mainIndex.createScope( anotherIndex ).query() .select( projection2 ) .where( f -> f.matchAll() ) .toQuery(), Set.of( mainIndex.name(), anotherIndex.name() ), Set.of( mainIndex.name(), otherIndex.name() ), Set.of( otherIndex.name() ) ); scope = mainIndex.createScope( otherIndex, anotherIndex ); SearchProjection<String> projection3 = scope.projection() .field( mainIndex.binding().string1Field.relativeFieldName, String.class ).toProjection(); assertThatCode( () -> mainIndex.createScope( otherIndex ).query() .select( projection3 ) .where( f -> f.matchAll() ) 
.toQuery() ) .doesNotThrowAnyException(); assertThatCode( () -> otherIndex.createScope().query() .select( projection3 ) .where( f -> f.matchAll() ) .toQuery() ) .doesNotThrowAnyException(); assertThatCode( () -> anotherIndex.createScope().query() .select( projection3 ) .where( f -> f.matchAll() ) .toQuery() ) .doesNotThrowAnyException(); assertThatCode( () -> otherIndex.createScope( mainIndex ).query() .select( projection3 ) .where( f -> f.matchAll() ) .toQuery() ) .doesNotThrowAnyException(); assertThatCode( () -> anotherIndex.createScope( mainIndex ).query() .select( projection3 ) .where( f -> f.matchAll() ) .toQuery() ) .doesNotThrowAnyException(); assertThatCode( () -> anotherIndex.createScope( otherIndex ).query() .select( projection3 ) .where( f -> f.matchAll() ) .toQuery() ) .doesNotThrowAnyException(); } private static void assertFailScope(ThrowableAssert.ThrowingCallable query, Set<String> scope, Set<String> projection, Set<String> differences) { List<String> messageParts = new ArrayList<>(); messageParts.add( "Invalid search projection" ); messageParts.add( "You must build the projection from a scope targeting indexes " ); messageParts.addAll( scope ); messageParts.add( "the given projection was built from a scope targeting " ); messageParts.addAll( projection ); messageParts.add( "where indexes [" ); messageParts.addAll( differences ); messageParts.add( "] are missing" ); assertThatThrownBy( query ) .isInstanceOf( SearchException.class ) .hasMessageContainingAll( messageParts.toArray( String[]::new ) ); } @Test void extension() { StubMappingScope scope = mainIndex.createScope(); SearchQuery<String> query; // Mandatory extension, supported query = scope.query() .select( f -> f.extension( new SupportedExtension<>() ) .extendedProjection( "string1", String.class ) ) .where( f -> f.id().matching( DOCUMENT_1 ) ) .toQuery(); assertThatQuery( query ) .hasHitsAnyOrder( mainIndex.binding().string1Field.document1Value.indexedValue ); // Mandatory extension, 
unsupported assertThatThrownBy( () -> scope.projection().extension( new UnSupportedExtension<>() ) ) .isInstanceOf( SearchException.class ); // Conditional extensions with orElse - two, both supported query = scope.query() .select( f -> f.<String>extension() .ifSupported( new SupportedExtension<>(), extended -> extended.extendedProjection( "string1", String.class ) ) .ifSupported( new SupportedExtension<>(), shouldNotBeCalled() ) .orElseFail() ) .where( f -> f.id().matching( DOCUMENT_1 ) ) .toQuery(); assertThatQuery( query ) .hasHitsAnyOrder( mainIndex.binding().string1Field.document1Value.indexedValue ); // Conditional extensions with orElse - two, second supported query = scope.query() .select( f -> f.<String>extension() .ifSupported( new UnSupportedExtension<>(), shouldNotBeCalled() ) .ifSupported( new SupportedExtension<>(), extended -> extended.extendedProjection( "string1", String.class ) ) .orElse( shouldNotBeCalled() ) ) .where( f -> f.id().matching( DOCUMENT_1 ) ) .toQuery(); assertThatQuery( query ) .hasHitsAnyOrder( mainIndex.binding().string1Field.document1Value.indexedValue ); // Conditional extensions with orElse - two, both unsupported query = scope.query() .select( f -> f.<String>extension() .ifSupported( new UnSupportedExtension<>(), shouldNotBeCalled() ) .ifSupported( new UnSupportedExtension<>(), shouldNotBeCalled() ) .orElse( c -> c.field( "string1", String.class ) ) ) .where( f -> f.id().matching( DOCUMENT_1 ) ) .toQuery(); assertThatQuery( query ) .hasHitsAnyOrder( mainIndex.binding().string1Field.document1Value.indexedValue ); } @Test @TestForIssue(jiraKey = "HSEARCH-4162") void toAbsolutePath() { assertThat( mainIndex.createScope().projection().toAbsolutePath( "string" ) ) .isEqualTo( "string" ); } @Test @TestForIssue(jiraKey = "HSEARCH-4162") void toAbsolutePath_withRoot() { assertThat( mainIndex.createScope().projection().withRoot( "nested" ).toAbsolutePath( "inner" ) ) .isEqualTo( "nested.inner" ); } @Test @TestForIssue(jiraKey = 
"HSEARCH-4162") void toAbsolutePath_null() { assertThatThrownBy( () -> mainIndex.createScope().projection().toAbsolutePath( null ) ) .isInstanceOf( IllegalArgumentException.class ) .hasMessageContaining( "'relativeFieldPath' must not be null" ); } @Test @TestForIssue(jiraKey = "HSEARCH-4162") void toAbsolutePath_withRoot_null() { assertThatThrownBy( () -> mainIndex.createScope().projection().withRoot( "nested" ).toAbsolutePath( null ) ) .isInstanceOf( IllegalArgumentException.class ) .hasMessageContaining( "'relativeFieldPath' must not be null" ); } private void initData() { mainIndex.bulkIndexer() .add( DOCUMENT_1, document -> { mainIndex.binding().string1Field.document1Value.write( document ); mainIndex.binding().string2Field.document1Value.write( document ); mainIndex.binding().scoreField.document1Value.write( document ); DocumentElement nestedDocument = document.addObject( mainIndex.binding().nestedObject ); mainIndex.binding().nestedField.document1Value.write( nestedDocument ); DocumentElement nestedNestedDocument = nestedDocument.addObject( mainIndex.binding().nestedNestedObject ); mainIndex.binding().nestedNestedField.document1Value.write( nestedNestedDocument ); DocumentElement flattedDocument = nestedDocument.addObject( mainIndex.binding().flattenedObject ); mainIndex.binding().flattenedField.document1Value.write( flattedDocument ); } ) .add( DOCUMENT_2, document -> { mainIndex.binding().string1Field.document2Value.write( document ); mainIndex.binding().string2Field.document2Value.write( document ); mainIndex.binding().scoreField.document2Value.write( document ); DocumentElement nestedDocument = document.addObject( mainIndex.binding().nestedObject ); mainIndex.binding().nestedField.document2Value.write( nestedDocument ); DocumentElement nestedNestedDocument = nestedDocument.addObject( mainIndex.binding().nestedNestedObject ); mainIndex.binding().nestedNestedField.document2Value.write( nestedNestedDocument ); DocumentElement flattedDocument = 
nestedDocument.addObject( mainIndex.binding().flattenedObject ); mainIndex.binding().flattenedField.document2Value.write( flattedDocument ); } ) .add( DOCUMENT_3, document -> { mainIndex.binding().string1Field.document3Value.write( document ); mainIndex.binding().string2Field.document3Value.write( document ); mainIndex.binding().scoreField.document3Value.write( document ); DocumentElement nestedDocument = document.addObject( mainIndex.binding().nestedObject ); mainIndex.binding().nestedField.document3Value.write( nestedDocument ); DocumentElement nestedNestedDocument = nestedDocument.addObject( mainIndex.binding().nestedNestedObject ); mainIndex.binding().nestedNestedField.document3Value.write( nestedNestedDocument ); DocumentElement flattedDocument = nestedDocument.addObject( mainIndex.binding().flattenedObject ); mainIndex.binding().flattenedField.document3Value.write( flattedDocument ); } ) .add( EMPTY, document -> {} ) .join(); } private static <T, R> Function<T, R> shouldNotBeCalled() { return ignored -> { throw new IllegalStateException( "This should not be called" ); }; } private static class IndexBinding { final FieldModel<String> string1Field; final FieldModel<String> string2Field; final FieldModel<String> scoreField; final IndexObjectFieldReference nestedObject; final FieldModel<String> nestedField; final IndexObjectFieldReference nestedNestedObject; final FieldModel<String> nestedNestedField; final IndexObjectFieldReference flattenedObject; final FieldModel<String> flattenedField; IndexBinding(IndexSchemaElement root) { string1Field = FieldModel.mapper( String.class, "ccc", "mmm", "xxx" ) .map( root, "string1" ); string2Field = FieldModel.mapper( String.class, "ddd", "nnn", "yyy" ) .map( root, "string2" ); scoreField = FieldModel.mapper( c -> c.asString().analyzer( DefaultAnalysisDefinitions.ANALYZER_STANDARD_ENGLISH.name ), "scorepattern scorepattern", "scorepattern", "xxx" ) .map( root, "score" ); IndexSchemaObjectField nested = root.objectField( 
"nested", ObjectStructure.NESTED ); nestedObject = nested.toReference(); nestedField = FieldModel.mapper( String.class, "eee", "ooo", "zzz" ) .map( nested, "inner" ); IndexSchemaObjectField nestedNested = nested.objectField( "nested", ObjectStructure.NESTED ); nestedNestedObject = nestedNested.toReference(); nestedNestedField = FieldModel.mapper( String.class, "fff", "ppp", "aaa" ) .map( nestedNested, "inner" ); IndexSchemaObjectField flattened = nested.objectField( "flattened", ObjectStructure.FLATTENED ); flattenedObject = flattened.toReference(); flattenedField = FieldModel.mapper( String.class, "ggg", "ooo", "bbb" ) .map( flattened, "inner" ); } } private static class ValueModel<F> { private final IndexFieldReference<F> reference; final F indexedValue; private ValueModel(IndexFieldReference<F> reference, F indexedValue) { this.reference = reference; this.indexedValue = indexedValue; } public void write(DocumentElement target) { target.addValue( reference, indexedValue ); } } private static class FieldModel<F> { static <F> SimpleFieldMapper<F, ?, FieldModel<F>> mapper(Class<F> type, F document1Value, F document2Value, F document3Value) { return mapper( c -> (StandardIndexFieldTypeOptionsStep<?, F>) c.as( type ), document1Value, document2Value, document3Value ); } static <F> SimpleFieldMapper<F, ?, FieldModel<F>> mapper( Function<IndexFieldTypeFactory, StandardIndexFieldTypeOptionsStep<?, F>> configuration, F document1Value, F document2Value, F document3Value) { return SimpleFieldMapper.of( configuration, c -> c.projectable( Projectable.YES ), (reference, name) -> new FieldModel<>( reference, name, document1Value, document2Value, document3Value ) ); } final String relativeFieldName; final ValueModel<F> document1Value; final ValueModel<F> document2Value; final ValueModel<F> document3Value; private FieldModel(IndexFieldReference<F> reference, String relativeFieldName, F document1Value, F document2Value, F document3Value) { this.relativeFieldName = 
relativeFieldName; this.document1Value = new ValueModel<>( reference, document1Value ); this.document2Value = new ValueModel<>( reference, document2Value ); this.document3Value = new ValueModel<>( reference, document3Value ); } } private static class SupportedExtension<SR, R, E> implements SearchProjectionFactoryExtension<MyExtendedFactory<SR, R, E>, R, E> { @Override public Optional<MyExtendedFactory<SR, R, E>> extendOptional(SearchProjectionFactory<R, E> original) { assertThat( original ).isNotNull(); return Optional.of( new MyExtendedFactory<>( original ) ); } } private static class UnSupportedExtension<SR, R, E> implements SearchProjectionFactoryExtension<MyExtendedFactory<SR, R, E>, R, E> { @Override public Optional<MyExtendedFactory<SR, R, E>> extendOptional(SearchProjectionFactory<R, E> original) { assertThat( original ).isNotNull(); return Optional.empty(); } } private static class MyExtendedFactory<SR, R, E> { private final SearchProjectionFactory<R, E> delegate; MyExtendedFactory(SearchProjectionFactory<R, E> delegate) { this.delegate = delegate; } public <T> ProjectionFinalStep<T> extendedProjection(String fieldName, Class<T> type) { return delegate.field( fieldName, type ); } } }
openjdk/jdk8
36,199
jdk/src/share/classes/javax/naming/spi/NamingManager.java
/* * Copyright (c) 1999, 2012, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package javax.naming.spi; import java.util.Enumeration; import java.util.Hashtable; import java.util.StringTokenizer; import java.net.MalformedURLException; import javax.naming.*; import com.sun.naming.internal.VersionHelper; import com.sun.naming.internal.ResourceManager; import com.sun.naming.internal.FactoryEnumeration; /** * This class contains methods for creating context objects * and objects referred to by location information in the naming * or directory service. *<p> * This class cannot be instantiated. It has only static methods. *<p> * The mention of URL in the documentation for this class refers to * a URL string as defined by RFC 1738 and its related RFCs. 
It is * any string that conforms to the syntax described therein, and * may not always have corresponding support in the java.net.URL * class or Web browsers. *<p> * NamingManager is safe for concurrent access by multiple threads. *<p> * Except as otherwise noted, * a <tt>Name</tt> or environment parameter * passed to any method is owned by the caller. * The implementation will not modify the object or keep a reference * to it, although it may keep a reference to a clone or copy. * * @author Rosanna Lee * @author Scott Seligman * @since 1.3 */ public class NamingManager { /* * Disallow anyone from creating one of these. * Made package private so that DirectoryManager can subclass. */ NamingManager() {} // should be protected and package private static final VersionHelper helper = VersionHelper.getVersionHelper(); // --------- object factory stuff /** * Package-private; used by DirectoryManager and NamingManager. */ private static ObjectFactoryBuilder object_factory_builder = null; /** * The ObjectFactoryBuilder determines the policy used when * trying to load object factories. * See getObjectInstance() and class ObjectFactory for a description * of the default policy. * setObjectFactoryBuilder() overrides this default policy by installing * an ObjectFactoryBuilder. Subsequent object factories will * be loaded and created using the installed builder. *<p> * The builder can only be installed if the executing thread is allowed * (by the security manager's checkSetFactory() method) to do so. * Once installed, the builder cannot be replaced. *<p> * @param builder The factory builder to install. If null, no builder * is installed. * @exception SecurityException builder cannot be installed * for security reasons. * @exception NamingException builder cannot be installed for * a non-security-related reason. * @exception IllegalStateException If a factory has already been installed. 
* @see #getObjectInstance * @see ObjectFactory * @see ObjectFactoryBuilder * @see java.lang.SecurityManager#checkSetFactory */ public static synchronized void setObjectFactoryBuilder( ObjectFactoryBuilder builder) throws NamingException { if (object_factory_builder != null) throw new IllegalStateException("ObjectFactoryBuilder already set"); SecurityManager security = System.getSecurityManager(); if (security != null) { security.checkSetFactory(); } object_factory_builder = builder; } /** * Used for accessing object factory builder. */ static synchronized ObjectFactoryBuilder getObjectFactoryBuilder() { return object_factory_builder; } /** * Retrieves the ObjectFactory for the object identified by a reference, * using the reference's factory class name and factory codebase * to load in the factory's class. * @param ref The non-null reference to use. * @param factoryName The non-null class name of the factory. * @return The object factory for the object identified by ref; null * if unable to load the factory. */ static ObjectFactory getObjectFactoryFromReference( Reference ref, String factoryName) throws IllegalAccessException, InstantiationException, MalformedURLException { Class<?> clas = null; // Try to use current class loader try { clas = helper.loadClass(factoryName); } catch (ClassNotFoundException e) { // ignore and continue // e.printStackTrace(); } // All other exceptions are passed up. // Not in class path; try to use codebase String codebase; if (clas == null && (codebase = ref.getFactoryClassLocation()) != null) { try { clas = helper.loadClass(factoryName, codebase); } catch (ClassNotFoundException e) { } } return (clas != null) ? (ObjectFactory) clas.newInstance() : null; } /** * Creates an object using the factories specified in the * <tt>Context.OBJECT_FACTORIES</tt> property of the environment * or of the provider resource file associated with <tt>nameCtx</tt>. 
* * @return factory created; null if cannot create */ private static Object createObjectFromFactories(Object obj, Name name, Context nameCtx, Hashtable<?,?> environment) throws Exception { FactoryEnumeration factories = ResourceManager.getFactories( Context.OBJECT_FACTORIES, environment, nameCtx); if (factories == null) return null; // Try each factory until one succeeds ObjectFactory factory; Object answer = null; while (answer == null && factories.hasMore()) { factory = (ObjectFactory)factories.next(); answer = factory.getObjectInstance(obj, name, nameCtx, environment); } return answer; } private static String getURLScheme(String str) { int colon_posn = str.indexOf(':'); int slash_posn = str.indexOf('/'); if (colon_posn > 0 && (slash_posn == -1 || colon_posn < slash_posn)) return str.substring(0, colon_posn); return null; } /** * Creates an instance of an object for the specified object * and environment. * <p> * If an object factory builder has been installed, it is used to * create a factory for creating the object. * Otherwise, the following rules are used to create the object: *<ol> * <li>If <code>refInfo</code> is a <code>Reference</code> * or <code>Referenceable</code> containing a factory class name, * use the named factory to create the object. * Return <code>refInfo</code> if the factory cannot be created. * Under JDK 1.1, if the factory class must be loaded from a location * specified in the reference, a <tt>SecurityManager</tt> must have * been installed or the factory creation will fail. * If an exception is encountered while creating the factory, * it is passed up to the caller. * <li>If <tt>refInfo</tt> is a <tt>Reference</tt> or * <tt>Referenceable</tt> with no factory class name, * and the address or addresses are <tt>StringRefAddr</tt>s with * address type "URL", * try the URL context factory corresponding to each URL's scheme id * to create the object (see <tt>getURLContext()</tt>). * If that fails, continue to the next step. 
* <li> Use the object factories specified in * the <tt>Context.OBJECT_FACTORIES</tt> property of the environment, * and of the provider resource file associated with * <tt>nameCtx</tt>, in that order. * The value of this property is a colon-separated list of factory * class names that are tried in order, and the first one that succeeds * in creating an object is the one used. * If none of the factories can be loaded, * return <code>refInfo</code>. * If an exception is encountered while creating the object, the * exception is passed up to the caller. *</ol> *<p> * Service providers that implement the <tt>DirContext</tt> * interface should use * <tt>DirectoryManager.getObjectInstance()</tt>, not this method. * Service providers that implement only the <tt>Context</tt> * interface should use this method. * <p> * Note that an object factory (an object that implements the ObjectFactory * interface) must be public and must have a public constructor that * accepts no arguments. * <p> * The <code>name</code> and <code>nameCtx</code> parameters may * optionally be used to specify the name of the object being created. * <code>name</code> is the name of the object, relative to context * <code>nameCtx</code>. This information could be useful to the object * factory or to the object implementation. * If there are several possible contexts from which the object * could be named -- as will often be the case -- it is up to * the caller to select one. A good rule of thumb is to select the * "deepest" context available. * If <code>nameCtx</code> is null, <code>name</code> is relative * to the default initial context. If no name is being specified, the * <code>name</code> parameter should be null. * * @param refInfo The possibly null object for which to create an object. * @param name The name of this object relative to <code>nameCtx</code>. * Specifying a name is optional; if it is * omitted, <code>name</code> should be null. 
* @param nameCtx The context relative to which the <code>name</code> * parameter is specified. If null, <code>name</code> is * relative to the default initial context. * @param environment The possibly null environment to * be used in the creation of the object factory and the object. * @return An object created using <code>refInfo</code>; or * <code>refInfo</code> if an object cannot be created using * the algorithm described above. * @exception NamingException if a naming exception was encountered * while attempting to get a URL context, or if one of the * factories accessed throws a NamingException. * @exception Exception if one of the factories accessed throws an * exception, or if an error was encountered while loading * and instantiating the factory and object classes. * A factory should only throw an exception if it does not want * other factories to be used in an attempt to create an object. * See ObjectFactory.getObjectInstance(). * @see #getURLContext * @see ObjectFactory * @see ObjectFactory#getObjectInstance */ public static Object getObjectInstance(Object refInfo, Name name, Context nameCtx, Hashtable<?,?> environment) throws Exception { ObjectFactory factory; // Use builder if installed ObjectFactoryBuilder builder = getObjectFactoryBuilder(); if (builder != null) { // builder must return non-null factory factory = builder.createObjectFactory(refInfo, environment); return factory.getObjectInstance(refInfo, name, nameCtx, environment); } // Use reference if possible Reference ref = null; if (refInfo instanceof Reference) { ref = (Reference) refInfo; } else if (refInfo instanceof Referenceable) { ref = ((Referenceable)(refInfo)).getReference(); } Object answer; if (ref != null) { String f = ref.getFactoryClassName(); if (f != null) { // if reference identifies a factory, use exclusively factory = getObjectFactoryFromReference(ref, f); if (factory != null) { return factory.getObjectInstance(ref, name, nameCtx, environment); } // No factory found, so 
return original refInfo. // Will reach this point if factory class is not in // class path and reference does not contain a URL for it return refInfo; } else { // if reference has no factory, check for addresses // containing URLs answer = processURLAddrs(ref, name, nameCtx, environment); if (answer != null) { return answer; } } } // try using any specified factories answer = createObjectFromFactories(refInfo, name, nameCtx, environment); return (answer != null) ? answer : refInfo; } /* * Ref has no factory. For each address of type "URL", try its URL * context factory. Returns null if unsuccessful in creating and * invoking a factory. */ static Object processURLAddrs(Reference ref, Name name, Context nameCtx, Hashtable<?,?> environment) throws NamingException { for (int i = 0; i < ref.size(); i++) { RefAddr addr = ref.get(i); if (addr instanceof StringRefAddr && addr.getType().equalsIgnoreCase("URL")) { String url = (String)addr.getContent(); Object answer = processURL(url, name, nameCtx, environment); if (answer != null) { return answer; } } } return null; } private static Object processURL(Object refInfo, Name name, Context nameCtx, Hashtable<?,?> environment) throws NamingException { Object answer; // If refInfo is a URL string, try to use its URL context factory // If no context found, continue to try object factories. if (refInfo instanceof String) { String url = (String)refInfo; String scheme = getURLScheme(url); if (scheme != null) { answer = getURLObject(scheme, refInfo, name, nameCtx, environment); if (answer != null) { return answer; } } } // If refInfo is an array of URL strings, // try to find a context factory for any one of its URLs. // If no context found, continue to try object factories. 
// NOTE(review): this is the tail of a method whose signature lies above this
// chunk — presumably the helper that resolves URL-typed reference info; the
// declarations of `answer`, `refInfo`, `name`, `nameCtx` and `environment`
// are not visible here. TODO confirm against the full file.
if (refInfo instanceof String[]) {
    // An array of URLs is treated as a set of equivalent addresses: try each
    // one's scheme in order until a factory produces a non-null object.
    String[] urls = (String[]) refInfo;
    for (int i = 0; i < urls.length; i++) {
        String scheme = getURLScheme(urls[i]);
        if (scheme != null) {
            // The whole refInfo (not just urls[i]) is handed to the factory,
            // so it can see every equivalent URL at once.
            answer = getURLObject(scheme, refInfo, name, nameCtx,
                environment);
            if (answer != null)
                return answer;
        }
    }
}
// No scheme yielded an object.
return null;
}

/**
 * Retrieves a context identified by <code>obj</code>, using the specified
 * environment.
 * Used by ContinuationContext.
 *
 * @param obj       The object identifying the context.
 * @param name      The name of the context being returned, relative to
 *                  <code>nameCtx</code>, or null if no name is being
 *                  specified.
 *                  See the <code>getObjectInstance</code> method for
 *                  details.
 * @param nameCtx   The context relative to which <code>name</code> is
 *                  specified, or null for the default initial context.
 *                  See the <code>getObjectInstance</code> method for
 *                  details.
 * @param environment Environment specifying characteristics of the
 *                  resulting context.
 * @return A context identified by <code>obj</code>.
 *
 * @see #getObjectInstance
 */
static Context getContext(Object obj, Name name, Context nameCtx,
        Hashtable<?,?> environment) throws NamingException {
    Object answer;
    if (obj instanceof Context) {
        // %%% Ignore environment for now. OK since method not public.
        return (Context) obj;
    }
    try {
        answer = getObjectInstance(obj, name, nameCtx, environment);
    } catch (NamingException e) {
        // Already the contract's exception type; rethrow untouched.
        throw e;
    } catch (Exception e) {
        // Wrap any other failure so callers only ever see NamingException,
        // preserving the original cause.
        NamingException ne = new NamingException();
        ne.setRootCause(e);
        throw ne;
    }
    // A result that is not a Context is silently discarded: caller gets null.
    return (answer instanceof Context)
        ? (Context) answer
        : null;
}

// Used by ContinuationContext.
// Mirrors getContext() exactly, but for objects resolving to a Resolver.
static Resolver getResolver(Object obj, Name name, Context nameCtx,
        Hashtable<?,?> environment) throws NamingException {
    Object answer;
    if (obj instanceof Resolver) {
        // %%% Ignore environment for now. OK since method not public.
        return (Resolver) obj;
    }
    try {
        answer = getObjectInstance(obj, name, nameCtx, environment);
    } catch (NamingException e) {
        throw e;
    } catch (Exception e) {
        // Wrap non-naming failures, preserving the cause.
        NamingException ne = new NamingException();
        ne.setRootCause(e);
        throw ne;
    }
    return (answer instanceof Resolver) ? (Resolver) answer : null;
}


/***************** URL Context implementations ***************/

/**
 * Creates a context for the given URL scheme id.
 * <p>
 * The resulting context is for resolving URLs of the
 * scheme <code>scheme</code>. The resulting context is not tied
 * to a specific URL. It is able to handle arbitrary URLs with
 * the specified scheme.
 *<p>
 * The class name of the factory that creates the resulting context
 * has the naming convention <i>scheme-id</i>URLContextFactory
 * (e.g. "ftpURLContextFactory" for the "ftp" scheme-id),
 * in the package specified as follows.
 * The <tt>Context.URL_PKG_PREFIXES</tt> environment property (which
 * may contain values taken from applet parameters, system properties,
 * or application resource files)
 * contains a colon-separated list of package prefixes.
 * Each package prefix in
 * the property is tried in the order specified to load the factory class.
 * The default package prefix is "com.sun.jndi.url" (if none of the
 * specified packages work, this default is tried).
 * The complete package name is constructed using the package prefix,
 * concatenated with the scheme id.
 *<p>
 * For example, if the scheme id is "ldap", and the
 * <tt>Context.URL_PKG_PREFIXES</tt> property
 * contains "com.widget:com.wiz.jndi",
 * the naming manager would attempt to load the following classes
 * until one is successfully instantiated:
 *<ul>
 * <li>com.widget.ldap.ldapURLContextFactory
 * <li>com.wiz.jndi.ldap.ldapURLContextFactory
 * <li>com.sun.jndi.url.ldap.ldapURLContextFactory
 *</ul>
 * If none of the package prefixes work, null is returned.
 *<p>
 * If a factory is instantiated, it is invoked with the following
 * parameters to produce the resulting context.
 * <p>
 * <code>factory.getObjectInstance(null, environment);</code>
 * <p>
 * For example, invoking getObjectInstance() as shown above
 * on a LDAP URL context factory would return a
 * context that can resolve LDAP urls
 * (e.g. "ldap://ldap.wiz.com/o=wiz,c=us",
 * "ldap://ldap.umich.edu/o=umich,c=us", ...).
 *<p>
 * Note that an object factory (an object that implements the ObjectFactory
 * interface) must be public and must have a public constructor that
 * accepts no arguments.
 *
 * @param scheme The non-null scheme-id of the URLs supported by the context.
 * @param environment The possibly null environment properties to be
 *          used in the creation of the object factory and the context.
 * @return A context for resolving URLs with the
 *         scheme id <code>scheme</code>;
 *         <code>null</code> if the factory for creating the
 *         context is not found.
 * @exception NamingException If a naming exception occurs while creating
 *          the context.
 * @see #getObjectInstance
 * @see ObjectFactory#getObjectInstance
 */
public static Context getURLContext(String scheme,
        Hashtable<?,?> environment) throws NamingException {
    // pass in 'null' to indicate creation of generic context for scheme
    // (i.e. not specific to a URL).
    Object answer = getURLObject(scheme, null, null, null, environment);
    if (answer instanceof Context) {
        return (Context) answer;
    } else {
        // Either no factory was found or it produced a non-Context object.
        return null;
    }
}

// Fallback package prefix tried after every prefix listed in
// Context.URL_PKG_PREFIXES.
private static final String defaultPkgPrefix = "com.sun.jndi.url";

/**
 * Creates an object for the given URL scheme id using
 * the supplied urlInfo.
 * <p>
 * If urlInfo is null, the result is a context for resolving URLs
 * with the scheme id 'scheme'.
 * If urlInfo is a URL, the result is a context named by the URL.
 * Names passed to this context is assumed to be relative to this
 * context (i.e. not a URL). For example, if urlInfo is
 * "ldap://ldap.wiz.com/o=Wiz,c=us", the resulting context will
 * be that pointed to by "o=Wiz,c=us" on the server 'ldap.wiz.com'.
 * Subsequent names that can be passed to this context will be
 * LDAP names relative to this context (e.g. cn="Barbs Jensen").
 * If urlInfo is an array of URLs, the URLs are assumed
 * to be equivalent in terms of the context to which they refer.
 * The resulting context is like that of the single URL case.
 * If urlInfo is of any other type, that is handled by the
 * context factory for the URL scheme.
 * @param scheme the URL scheme id for the context
 * @param urlInfo information used to create the context
 * @param name name of this object relative to <code>nameCtx</code>
 * @param nameCtx Context whose provider resource file will be searched
 *          for package prefix values (or null if none)
 * @param environment Environment properties for creating the context
 * @see javax.naming.InitialContext
 */
private static Object getURLObject(String scheme, Object urlInfo,
        Name name, Context nameCtx,
        Hashtable<?,?> environment) throws NamingException {

    // Factory class is looked up by convention:
    // <prefix>.<scheme>.<scheme>URLContextFactory
    // e.g. "ftpURLContextFactory"
    ObjectFactory factory = (ObjectFactory) ResourceManager.getFactory(
        Context.URL_PKG_PREFIXES, environment, nameCtx,
        "." + scheme + "." + scheme + "URLContextFactory",
        defaultPkgPrefix);

    if (factory == null)
        return null;

    // Found object factory
    try {
        return factory.getObjectInstance(urlInfo, name, nameCtx,
            environment);
    } catch (NamingException e) {
        throw e;
    } catch (Exception e) {
        // Wrap non-naming failures, preserving the cause.
        NamingException ne = new NamingException();
        ne.setRootCause(e);
        throw ne;
    }
}

// ------------ Initial Context Factory Stuff

// Install-once builder; guarded by this class's monitor (all access is via
// synchronized methods).
private static InitialContextFactoryBuilder initctx_factory_builder = null;

/**
 * Use this method for accessing initctx_factory_builder while
 * inside an unsynchronized method.
 */
private static synchronized InitialContextFactoryBuilder
        getInitialContextFactoryBuilder() {
    return initctx_factory_builder;
}

/**
 * Creates an initial context using the specified environment
 * properties.
 *<p>
 * If an InitialContextFactoryBuilder has been installed,
 * it is used to create the factory for creating the initial context.
 * Otherwise, the class specified in the
 * <tt>Context.INITIAL_CONTEXT_FACTORY</tt> environment property is used.
 * Note that an initial context factory (an object that implements the
 * InitialContextFactory interface) must be public and must have a
 * public constructor that accepts no arguments.
 *
 * @param env The possibly null environment properties used when
 *          creating the context.
 * @return A non-null initial context.
 * @exception NoInitialContextException If the
 *          <tt>Context.INITIAL_CONTEXT_FACTORY</tt> property
 *          is not found or names a nonexistent
 *          class or a class that cannot be instantiated,
 *          or if the initial context could not be created for some other
 *          reason.
 * @exception NamingException If some other naming exception was encountered.
 * @see javax.naming.InitialContext
 * @see javax.naming.directory.InitialDirContext
 */
public static Context getInitialContext(Hashtable<?,?> env)
        throws NamingException {
    InitialContextFactory factory;

    // Snapshot the builder via the synchronized accessor; this method itself
    // is deliberately unsynchronized.
    InitialContextFactoryBuilder builder = getInitialContextFactoryBuilder();
    if (builder == null) {
        // No factory installed, use property
        // Get initial context factory class name
        String className = env != null ?
            (String) env.get(Context.INITIAL_CONTEXT_FACTORY) : null;
        if (className == null) {
            NoInitialContextException ne = new NoInitialContextException(
                "Need to specify class name in environment or system " +
                "property, or as an applet parameter, or in an " +
                "application resource file: " +
                Context.INITIAL_CONTEXT_FACTORY);
            throw ne;
        }

        try {
            // Reflective instantiation via the VersionHelper-style loader;
            // requires a public no-arg constructor (see javadoc above).
            factory = (InitialContextFactory)
                helper.loadClass(className).newInstance();
        } catch(Exception e) {
            // Any load/instantiation failure is reported uniformly, with the
            // original cause attached.
            NoInitialContextException ne = new NoInitialContextException(
                "Cannot instantiate class: " + className);
            ne.setRootCause(e);
            throw ne;
        }
    } else {
        factory = builder.createInitialContextFactory(env);
    }

    return factory.getInitialContext(env);
}

/**
 * Sets the InitialContextFactory builder to be builder.
 *
 *<p>
 * The builder can only be installed if the executing thread is allowed by
 * the security manager to do so. Once installed, the builder cannot
 * be replaced.
 * @param builder The initial context factory builder to install. If null,
 *        no builder is set.
 * @exception SecurityException builder cannot be installed for security
 *            reasons.
 * @exception NamingException builder cannot be installed for
 *         a non-security-related reason.
 * @exception IllegalStateException If a builder was previous installed.
 * @see #hasInitialContextFactoryBuilder
 * @see java.lang.SecurityManager#checkSetFactory
 */
public static synchronized void setInitialContextFactoryBuilder(
        InitialContextFactoryBuilder builder)
        throws NamingException {
    // Install-once invariant: checked before the security check so a second
    // install always fails the same way regardless of permissions.
    if (initctx_factory_builder != null)
        throw new IllegalStateException(
            "InitialContextFactoryBuilder already set");

    SecurityManager security = System.getSecurityManager();
    if (security != null) {
        security.checkSetFactory();
    }
    initctx_factory_builder = builder;
}

/**
 * Determines whether an initial context factory builder has
 * been set.
 * @return true if an initial context factory builder has
 * been set; false otherwise.
 * @see #setInitialContextFactoryBuilder
 */
public static boolean hasInitialContextFactoryBuilder() {
    return (getInitialContextFactoryBuilder() != null);
}

// ----- Continuation Context Stuff

/**
 * Constant that holds the name of the environment property into
 * which <tt>getContinuationContext()</tt> stores the value of its
 * <tt>CannotProceedException</tt> parameter.
 * This property is inherited by the continuation context, and may
 * be used by that context's service provider to inspect the
 * fields of the exception.
 *<p>
 * The value of this constant is "java.naming.spi.CannotProceedException".
 *
 * @see #getContinuationContext
 * @since 1.3
 */
public static final String CPE = "java.naming.spi.CannotProceedException";

/**
 * Creates a context in which to continue a context operation.
 *<p>
 * In performing an operation on a name that spans multiple
 * namespaces, a context from one naming system may need to pass
 * the operation on to the next naming system. The context
 * implementation does this by first constructing a
 * <code>CannotProceedException</code> containing information
 * pinpointing how far it has proceeded. It then obtains a
 * continuation context from JNDI by calling
 * <code>getContinuationContext</code>. The context
 * implementation should then resume the context operation by
 * invoking the same operation on the continuation context, using
 * the remainder of the name that has not yet been resolved.
 *<p>
 * Before making use of the <tt>cpe</tt> parameter, this method
 * updates the environment associated with that object by setting
 * the value of the property <a href="#CPE"><tt>CPE</tt></a>
 * to <tt>cpe</tt>. This property will be inherited by the
 * continuation context, and may be used by that context's
 * service provider to inspect the fields of this exception.
 *
 * @param cpe
 *          The non-null exception that triggered this continuation.
 * @return A non-null Context object for continuing the operation.
 * @exception NamingException If a naming exception occurred.
 */
@SuppressWarnings("unchecked")
public static Context getContinuationContext(CannotProceedException cpe)
        throws NamingException {

    Hashtable<Object,Object> env =
        (Hashtable<Object,Object>) cpe.getEnvironment();
    if (env == null) {
        env = new Hashtable<>(7);
    } else {
        // Make a (shallow) copy of the environment.
        // (Avoids mutating the table still referenced by the exception's
        // originator; the CPE entry below goes only into the copy.)
        env = (Hashtable<Object,Object>) env.clone();
    }
    env.put(CPE, cpe);

    ContinuationContext cctx = new ContinuationContext(cpe, env);
    return cctx.getTargetContext();
}

// ------------ State Factory Stuff

/**
 * Retrieves the state of an object for binding.
 * <p>
 * Service providers that implement the <tt>DirContext</tt> interface
 * should use <tt>DirectoryManager.getStateToBind()</tt>, not this method.
 * Service providers that implement only the <tt>Context</tt> interface
 * should use this method.
 *<p>
 * This method uses the specified state factories in
 * the <tt>Context.STATE_FACTORIES</tt> property from the environment
 * properties, and from the provider resource file associated with
 * <tt>nameCtx</tt>, in that order.
 * The value of this property is a colon-separated list of factory
 * class names that are tried in order, and the first one that succeeds
 * in returning the object's state is the one used.
 * If no object's state can be retrieved in this way, return the
 * object itself.
 * If an exception is encountered while retrieving the state, the
 * exception is passed up to the caller.
 * <p>
 * Note that a state factory
 * (an object that implements the StateFactory
 * interface) must be public and must have a public constructor that
 * accepts no arguments.
 * <p>
 * The <code>name</code> and <code>nameCtx</code> parameters may
 * optionally be used to specify the name of the object being created.
 * See the description of "Name and Context Parameters" in
 * {@link ObjectFactory#getObjectInstance
 * ObjectFactory.getObjectInstance()}
 * for details.
 * <p>
 * This method may return a <tt>Referenceable</tt> object. The
 * service provider obtaining this object may choose to store it
 * directly, or to extract its reference (using
 * <tt>Referenceable.getReference()</tt>) and store that instead.
 *
 * @param obj The non-null object for which to get state to bind.
 * @param name The name of this object relative to <code>nameCtx</code>,
 *          or null if no name is specified.
 * @param nameCtx The context relative to which the <code>name</code>
 *          parameter is specified, or null if <code>name</code> is
 *          relative to the default initial context.
 * @param environment The possibly null environment to
 *          be used in the creation of the state factory and
 *          the object's state.
 * @return The non-null object representing <tt>obj</tt>'s state for
 *          binding. It could be the object (<tt>obj</tt>) itself.
 * @exception NamingException If one of the factories accessed throws an
 *          exception, or if an error was encountered while loading
 *          and instantiating the factory and object classes.
 *          A factory should only throw an exception if it does not want
 *          other factories to be used in an attempt to create an object.
 *          See <tt>StateFactory.getStateToBind()</tt>.
 * @see StateFactory
 * @see StateFactory#getStateToBind
 * @see DirectoryManager#getStateToBind
 * @since 1.3
 */
public static Object getStateToBind(Object obj, Name name, Context nameCtx,
        Hashtable<?,?> environment)
        throws NamingException {

    FactoryEnumeration factories = ResourceManager.getFactories(
        Context.STATE_FACTORIES, environment, nameCtx);

    if (factories == null) {
        // No state factories configured: bind the object as-is.
        return obj;
    }

    // Try each factory until one succeeds
    // (first non-null answer wins; remaining factories are not consulted).
    StateFactory factory;
    Object answer = null;
    while (answer == null && factories.hasMore()) {
        factory = (StateFactory) factories.next();
        answer = factory.getStateToBind(obj, name, nameCtx, environment);
    }

    return (answer != null) ? answer : obj;
}
} // end of enclosing class (its declaration lies above this chunk)
googleapis/google-cloud-java
35,867
java-securitycenter/proto-google-cloud-securitycenter-v1/src/main/java/com/google/cloud/securitycenter/v1/ValidateEventThreatDetectionCustomModuleRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/v1/securitycenter_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.v1; /** * * * <pre> * Request to validate an Event Threat Detection custom module. * </pre> * * Protobuf type {@code * google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest} */ public final class ValidateEventThreatDetectionCustomModuleRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest) ValidateEventThreatDetectionCustomModuleRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ValidateEventThreatDetectionCustomModuleRequest.newBuilder() to construct. 
private ValidateEventThreatDetectionCustomModuleRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ValidateEventThreatDetectionCustomModuleRequest() { parent_ = ""; rawText_ = ""; type_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ValidateEventThreatDetectionCustomModuleRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ValidateEventThreatDetectionCustomModuleRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ValidateEventThreatDetectionCustomModuleRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest .class, com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest .Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Resource name of the parent to validate the Custom Module under. * * Its format is: * * * `organizations/{organization}/eventThreatDetectionSettings`. * * `folders/{folder}/eventThreatDetectionSettings`. * * `projects/{project}/eventThreatDetectionSettings`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Resource name of the parent to validate the Custom Module under. * * Its format is: * * * `organizations/{organization}/eventThreatDetectionSettings`. * * `folders/{folder}/eventThreatDetectionSettings`. * * `projects/{project}/eventThreatDetectionSettings`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int RAW_TEXT_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object rawText_ = ""; /** * * * <pre> * Required. The raw text of the module's contents. Used to generate error * messages. * </pre> * * <code>string raw_text = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The rawText. */ @java.lang.Override public java.lang.String getRawText() { java.lang.Object ref = rawText_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); rawText_ = s; return s; } } /** * * * <pre> * Required. The raw text of the module's contents. Used to generate error * messages. * </pre> * * <code>string raw_text = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for rawText. 
*/ @java.lang.Override public com.google.protobuf.ByteString getRawTextBytes() { java.lang.Object ref = rawText_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); rawText_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TYPE_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object type_ = ""; /** * * * <pre> * Required. The type of the module (e.g. CONFIGURABLE_BAD_IP). * </pre> * * <code>string type = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The type. */ @java.lang.Override public java.lang.String getType() { java.lang.Object ref = type_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); type_ = s; return s; } } /** * * * <pre> * Required. The type of the module (e.g. CONFIGURABLE_BAD_IP). * </pre> * * <code>string type = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for type. 
*/ @java.lang.Override public com.google.protobuf.ByteString getTypeBytes() { java.lang.Object ref = type_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); type_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rawText_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, rawText_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, type_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(rawText_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, rawText_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(type_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, type_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest)) { return super.equals(obj); } com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest other = (com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getRawText().equals(other.getRawText())) return false; if (!getType().equals(other.getType())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + RAW_TEXT_FIELD_NUMBER; hash = (53 * hash) + getRawText().hashCode(); hash = (37 * hash) + TYPE_FIELD_NUMBER; hash = (53 * hash) + getType().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest 
parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request to validate an Event Threat Detection custom module. 
* </pre> * * Protobuf type {@code * google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest) com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ValidateEventThreatDetectionCustomModuleRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ValidateEventThreatDetectionCustomModuleRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest .class, com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest .Builder.class); } // Construct using // com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; rawText_ = ""; type_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ValidateEventThreatDetectionCustomModuleRequest_descriptor; } @java.lang.Override public com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest 
getDefaultInstanceForType() { return com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest build() { com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest buildPartial() { com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest result = new com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest( this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.rawText_ = rawText_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.type_ = type_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest) { return mergeFrom( (com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest other) { if (other == com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getRawText().isEmpty()) { rawText_ = other.rawText_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getType().isEmpty()) { type_ = other.type_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { rawText_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { type_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch 
(tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Resource name of the parent to validate the Custom Module under. * * Its format is: * * * `organizations/{organization}/eventThreatDetectionSettings`. * * `folders/{folder}/eventThreatDetectionSettings`. * * `projects/{project}/eventThreatDetectionSettings`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Resource name of the parent to validate the Custom Module under. * * Its format is: * * * `organizations/{organization}/eventThreatDetectionSettings`. * * `folders/{folder}/eventThreatDetectionSettings`. * * `projects/{project}/eventThreatDetectionSettings`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Resource name of the parent to validate the Custom Module under. * * Its format is: * * * `organizations/{organization}/eventThreatDetectionSettings`. * * `folders/{folder}/eventThreatDetectionSettings`. 
* * `projects/{project}/eventThreatDetectionSettings`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Resource name of the parent to validate the Custom Module under. * * Its format is: * * * `organizations/{organization}/eventThreatDetectionSettings`. * * `folders/{folder}/eventThreatDetectionSettings`. * * `projects/{project}/eventThreatDetectionSettings`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Resource name of the parent to validate the Custom Module under. * * Its format is: * * * `organizations/{organization}/eventThreatDetectionSettings`. * * `folders/{folder}/eventThreatDetectionSettings`. * * `projects/{project}/eventThreatDetectionSettings`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object rawText_ = ""; /** * * * <pre> * Required. The raw text of the module's contents. Used to generate error * messages. 
* </pre> * * <code>string raw_text = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The rawText. */ public java.lang.String getRawText() { java.lang.Object ref = rawText_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); rawText_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The raw text of the module's contents. Used to generate error * messages. * </pre> * * <code>string raw_text = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for rawText. */ public com.google.protobuf.ByteString getRawTextBytes() { java.lang.Object ref = rawText_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); rawText_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The raw text of the module's contents. Used to generate error * messages. * </pre> * * <code>string raw_text = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The rawText to set. * @return This builder for chaining. */ public Builder setRawText(java.lang.String value) { if (value == null) { throw new NullPointerException(); } rawText_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The raw text of the module's contents. Used to generate error * messages. * </pre> * * <code>string raw_text = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearRawText() { rawText_ = getDefaultInstance().getRawText(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. The raw text of the module's contents. Used to generate error * messages. 
* </pre> * * <code>string raw_text = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for rawText to set. * @return This builder for chaining. */ public Builder setRawTextBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); rawText_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object type_ = ""; /** * * * <pre> * Required. The type of the module (e.g. CONFIGURABLE_BAD_IP). * </pre> * * <code>string type = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The type. */ public java.lang.String getType() { java.lang.Object ref = type_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); type_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The type of the module (e.g. CONFIGURABLE_BAD_IP). * </pre> * * <code>string type = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The bytes for type. */ public com.google.protobuf.ByteString getTypeBytes() { java.lang.Object ref = type_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); type_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The type of the module (e.g. CONFIGURABLE_BAD_IP). * </pre> * * <code>string type = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The type to set. * @return This builder for chaining. */ public Builder setType(java.lang.String value) { if (value == null) { throw new NullPointerException(); } type_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Required. The type of the module (e.g. CONFIGURABLE_BAD_IP). 
* </pre> * * <code>string type = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @return This builder for chaining. */ public Builder clearType() { type_ = getDefaultInstance().getType(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Required. The type of the module (e.g. CONFIGURABLE_BAD_IP). * </pre> * * <code>string type = 3 [(.google.api.field_behavior) = REQUIRED];</code> * * @param value The bytes for type to set. * @return This builder for chaining. */ public Builder setTypeBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); type_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest) } // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest) private static final com.google.cloud.securitycenter.v1 .ValidateEventThreatDetectionCustomModuleRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest(); } public static com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ValidateEventThreatDetectionCustomModuleRequest> PARSER = new com.google.protobuf.AbstractParser< ValidateEventThreatDetectionCustomModuleRequest>() { @java.lang.Override public ValidateEventThreatDetectionCustomModuleRequest parsePartialFrom( 
com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException() .setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ValidateEventThreatDetectionCustomModuleRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ValidateEventThreatDetectionCustomModuleRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securitycenter.v1.ValidateEventThreatDetectionCustomModuleRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,179
java-analytics-data/google-analytics-data/src/main/java/com/google/analytics/data/v1beta/stub/BetaAnalyticsDataStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.analytics.data.v1beta.stub; import static com.google.analytics.data.v1beta.BetaAnalyticsDataClient.ListAudienceExportsPagedResponse; import com.google.analytics.data.v1beta.AudienceExport; import com.google.analytics.data.v1beta.AudienceExportMetadata; import com.google.analytics.data.v1beta.BatchRunPivotReportsRequest; import com.google.analytics.data.v1beta.BatchRunPivotReportsResponse; import com.google.analytics.data.v1beta.BatchRunReportsRequest; import com.google.analytics.data.v1beta.BatchRunReportsResponse; import com.google.analytics.data.v1beta.CheckCompatibilityRequest; import com.google.analytics.data.v1beta.CheckCompatibilityResponse; import com.google.analytics.data.v1beta.CreateAudienceExportRequest; import com.google.analytics.data.v1beta.GetAudienceExportRequest; import com.google.analytics.data.v1beta.GetMetadataRequest; import com.google.analytics.data.v1beta.ListAudienceExportsRequest; import com.google.analytics.data.v1beta.ListAudienceExportsResponse; import com.google.analytics.data.v1beta.Metadata; import com.google.analytics.data.v1beta.QueryAudienceExportRequest; import com.google.analytics.data.v1beta.QueryAudienceExportResponse; import com.google.analytics.data.v1beta.RunPivotReportRequest; import com.google.analytics.data.v1beta.RunPivotReportResponse; import com.google.analytics.data.v1beta.RunRealtimeReportRequest; import 
com.google.analytics.data.v1beta.RunRealtimeReportResponse; import com.google.analytics.data.v1beta.RunReportRequest; import com.google.analytics.data.v1beta.RunReportResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.BetaApi; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.grpc.ProtoOperationTransformers; import com.google.api.gax.httpjson.GaxHttpJsonProperties; import com.google.api.gax.httpjson.HttpJsonTransportChannel; import com.google.api.gax.httpjson.InstantiatingHttpJsonChannelProvider; import com.google.api.gax.longrunning.OperationSnapshot; import com.google.api.gax.longrunning.OperationTimedPollAlgorithm; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.OperationCallSettings; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.common.collect.ImmutableList; import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.longrunning.Operation; import java.io.IOException; import 
java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link BetaAnalyticsDataStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (analyticsdata.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. * * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of runReport: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * BetaAnalyticsDataStubSettings.Builder betaAnalyticsDataSettingsBuilder = * BetaAnalyticsDataStubSettings.newBuilder(); * betaAnalyticsDataSettingsBuilder * .runReportSettings() * .setRetrySettings( * betaAnalyticsDataSettingsBuilder * .runReportSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * BetaAnalyticsDataStubSettings betaAnalyticsDataSettings = * betaAnalyticsDataSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. * * <p>To configure the RetrySettings of a Long Running Operation method, create an * OperationTimedPollAlgorithm object and update the RPC's polling algorithm. For example, to * configure the RetrySettings for createAudienceExport: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. 
* // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * BetaAnalyticsDataStubSettings.Builder betaAnalyticsDataSettingsBuilder = * BetaAnalyticsDataStubSettings.newBuilder(); * TimedRetryAlgorithm timedRetryAlgorithm = * OperationalTimedPollAlgorithm.create( * RetrySettings.newBuilder() * .setInitialRetryDelayDuration(Duration.ofMillis(500)) * .setRetryDelayMultiplier(1.5) * .setMaxRetryDelayDuration(Duration.ofMillis(5000)) * .setTotalTimeoutDuration(Duration.ofHours(24)) * .build()); * betaAnalyticsDataSettingsBuilder * .createClusterOperationSettings() * .setPollingAlgorithm(timedRetryAlgorithm) * .build(); * }</pre> */ @BetaApi @Generated("by gapic-generator-java") public class BetaAnalyticsDataStubSettings extends StubSettings<BetaAnalyticsDataStubSettings> { /** The default scopes of the service. */ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder() .add("https://www.googleapis.com/auth/analytics") .add("https://www.googleapis.com/auth/analytics.readonly") .build(); private final UnaryCallSettings<RunReportRequest, RunReportResponse> runReportSettings; private final UnaryCallSettings<RunPivotReportRequest, RunPivotReportResponse> runPivotReportSettings; private final UnaryCallSettings<BatchRunReportsRequest, BatchRunReportsResponse> batchRunReportsSettings; private final UnaryCallSettings<BatchRunPivotReportsRequest, BatchRunPivotReportsResponse> batchRunPivotReportsSettings; private final UnaryCallSettings<GetMetadataRequest, Metadata> getMetadataSettings; private final UnaryCallSettings<RunRealtimeReportRequest, RunRealtimeReportResponse> runRealtimeReportSettings; private final UnaryCallSettings<CheckCompatibilityRequest, CheckCompatibilityResponse> checkCompatibilitySettings; private final UnaryCallSettings<CreateAudienceExportRequest, Operation> createAudienceExportSettings; 
private final OperationCallSettings< CreateAudienceExportRequest, AudienceExport, AudienceExportMetadata> createAudienceExportOperationSettings; private final UnaryCallSettings<QueryAudienceExportRequest, QueryAudienceExportResponse> queryAudienceExportSettings; private final UnaryCallSettings<GetAudienceExportRequest, AudienceExport> getAudienceExportSettings; private final PagedCallSettings< ListAudienceExportsRequest, ListAudienceExportsResponse, ListAudienceExportsPagedResponse> listAudienceExportsSettings; private static final PagedListDescriptor< ListAudienceExportsRequest, ListAudienceExportsResponse, AudienceExport> LIST_AUDIENCE_EXPORTS_PAGE_STR_DESC = new PagedListDescriptor< ListAudienceExportsRequest, ListAudienceExportsResponse, AudienceExport>() { @Override public String emptyToken() { return ""; } @Override public ListAudienceExportsRequest injectToken( ListAudienceExportsRequest payload, String token) { return ListAudienceExportsRequest.newBuilder(payload).setPageToken(token).build(); } @Override public ListAudienceExportsRequest injectPageSize( ListAudienceExportsRequest payload, int pageSize) { return ListAudienceExportsRequest.newBuilder(payload).setPageSize(pageSize).build(); } @Override public Integer extractPageSize(ListAudienceExportsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListAudienceExportsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<AudienceExport> extractResources(ListAudienceExportsResponse payload) { return payload.getAudienceExportsList(); } }; private static final PagedListResponseFactory< ListAudienceExportsRequest, ListAudienceExportsResponse, ListAudienceExportsPagedResponse> LIST_AUDIENCE_EXPORTS_PAGE_STR_FACT = new PagedListResponseFactory< ListAudienceExportsRequest, ListAudienceExportsResponse, ListAudienceExportsPagedResponse>() { @Override public ApiFuture<ListAudienceExportsPagedResponse> getFuturePagedResponse( 
UnaryCallable<ListAudienceExportsRequest, ListAudienceExportsResponse> callable, ListAudienceExportsRequest request, ApiCallContext context, ApiFuture<ListAudienceExportsResponse> futureResponse) { PageContext<ListAudienceExportsRequest, ListAudienceExportsResponse, AudienceExport> pageContext = PageContext.create( callable, LIST_AUDIENCE_EXPORTS_PAGE_STR_DESC, request, context); return ListAudienceExportsPagedResponse.createAsync(pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to runReport. */ public UnaryCallSettings<RunReportRequest, RunReportResponse> runReportSettings() { return runReportSettings; } /** Returns the object with the settings used for calls to runPivotReport. */ public UnaryCallSettings<RunPivotReportRequest, RunPivotReportResponse> runPivotReportSettings() { return runPivotReportSettings; } /** Returns the object with the settings used for calls to batchRunReports. */ public UnaryCallSettings<BatchRunReportsRequest, BatchRunReportsResponse> batchRunReportsSettings() { return batchRunReportsSettings; } /** Returns the object with the settings used for calls to batchRunPivotReports. */ public UnaryCallSettings<BatchRunPivotReportsRequest, BatchRunPivotReportsResponse> batchRunPivotReportsSettings() { return batchRunPivotReportsSettings; } /** Returns the object with the settings used for calls to getMetadata. */ public UnaryCallSettings<GetMetadataRequest, Metadata> getMetadataSettings() { return getMetadataSettings; } /** Returns the object with the settings used for calls to runRealtimeReport. */ public UnaryCallSettings<RunRealtimeReportRequest, RunRealtimeReportResponse> runRealtimeReportSettings() { return runRealtimeReportSettings; } /** Returns the object with the settings used for calls to checkCompatibility. 
*/ public UnaryCallSettings<CheckCompatibilityRequest, CheckCompatibilityResponse> checkCompatibilitySettings() { return checkCompatibilitySettings; } /** Returns the object with the settings used for calls to createAudienceExport. */ public UnaryCallSettings<CreateAudienceExportRequest, Operation> createAudienceExportSettings() { return createAudienceExportSettings; } /** Returns the object with the settings used for calls to createAudienceExport. */ public OperationCallSettings<CreateAudienceExportRequest, AudienceExport, AudienceExportMetadata> createAudienceExportOperationSettings() { return createAudienceExportOperationSettings; } /** Returns the object with the settings used for calls to queryAudienceExport. */ public UnaryCallSettings<QueryAudienceExportRequest, QueryAudienceExportResponse> queryAudienceExportSettings() { return queryAudienceExportSettings; } /** Returns the object with the settings used for calls to getAudienceExport. */ public UnaryCallSettings<GetAudienceExportRequest, AudienceExport> getAudienceExportSettings() { return getAudienceExportSettings; } /** Returns the object with the settings used for calls to listAudienceExports. */ public PagedCallSettings< ListAudienceExportsRequest, ListAudienceExportsResponse, ListAudienceExportsPagedResponse> listAudienceExportsSettings() { return listAudienceExportsSettings; } public BetaAnalyticsDataStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcBetaAnalyticsDataStub.create(this); } if (getTransportChannelProvider() .getTransportName() .equals(HttpJsonTransportChannel.getHttpJsonTransportName())) { return HttpJsonBetaAnalyticsDataStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns the default service name. 
*/ @Override public String getServiceName() { return "analyticsdata"; } /** Returns a builder for the default ExecutorProvider for this service. */ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. */ @ObsoleteApi("Use getEndpoint() instead") public static String getDefaultEndpoint() { return "analyticsdata.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "analyticsdata.mtls.googleapis.com:443"; } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default gRPC ChannelProvider for this service. */ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return InstantiatingGrpcChannelProvider.newBuilder() .setMaxInboundMessageSize(Integer.MAX_VALUE); } /** Returns a builder for the default REST ChannelProvider for this service. 
*/ @BetaApi public static InstantiatingHttpJsonChannelProvider.Builder defaultHttpJsonTransportProviderBuilder() { return InstantiatingHttpJsonChannelProvider.newBuilder(); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultGrpcTransportProviderBuilder().build(); } public static ApiClientHeaderProvider.Builder defaultGrpcApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(BetaAnalyticsDataStubSettings.class)) .setTransportToken( GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); } public static ApiClientHeaderProvider.Builder defaultHttpJsonApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(BetaAnalyticsDataStubSettings.class)) .setTransportToken( GaxHttpJsonProperties.getHttpJsonTokenName(), GaxHttpJsonProperties.getHttpJsonVersion()); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return BetaAnalyticsDataStubSettings.defaultGrpcApiClientHeaderProviderBuilder(); } /** Returns a new gRPC builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new REST builder for this class. */ public static Builder newHttpJsonBuilder() { return Builder.createHttpJsonDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. 
*/ public Builder toBuilder() { return new Builder(this); } protected BetaAnalyticsDataStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); runReportSettings = settingsBuilder.runReportSettings().build(); runPivotReportSettings = settingsBuilder.runPivotReportSettings().build(); batchRunReportsSettings = settingsBuilder.batchRunReportsSettings().build(); batchRunPivotReportsSettings = settingsBuilder.batchRunPivotReportsSettings().build(); getMetadataSettings = settingsBuilder.getMetadataSettings().build(); runRealtimeReportSettings = settingsBuilder.runRealtimeReportSettings().build(); checkCompatibilitySettings = settingsBuilder.checkCompatibilitySettings().build(); createAudienceExportSettings = settingsBuilder.createAudienceExportSettings().build(); createAudienceExportOperationSettings = settingsBuilder.createAudienceExportOperationSettings().build(); queryAudienceExportSettings = settingsBuilder.queryAudienceExportSettings().build(); getAudienceExportSettings = settingsBuilder.getAudienceExportSettings().build(); listAudienceExportsSettings = settingsBuilder.listAudienceExportsSettings().build(); } /** Builder for BetaAnalyticsDataStubSettings. 
*/
/**
 * Builder for {@code BetaAnalyticsDataStubSettings}.
 *
 * <p>Generated code (GAPIC) — holds one mutable per-RPC settings builder for each method of the
 * stub, plus named retry-code sets and retry-parameter bundles that {@link #initDefaults} wires
 * onto them. Obtain via the enclosing class, mutate the per-method builders, then {@link #build}.
 */
public static class Builder extends StubSettings.Builder<BetaAnalyticsDataStubSettings, Builder> {
  // All unary method builders collected together so a single updater function can be
  // applied to every one of them at once (see applyToAllUnaryMethods).
  private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders;
  private final UnaryCallSettings.Builder<RunReportRequest, RunReportResponse> runReportSettings;
  private final UnaryCallSettings.Builder<RunPivotReportRequest, RunPivotReportResponse>
      runPivotReportSettings;
  private final UnaryCallSettings.Builder<BatchRunReportsRequest, BatchRunReportsResponse>
      batchRunReportsSettings;
  private final UnaryCallSettings.Builder<BatchRunPivotReportsRequest, BatchRunPivotReportsResponse>
      batchRunPivotReportsSettings;
  private final UnaryCallSettings.Builder<GetMetadataRequest, Metadata> getMetadataSettings;
  private final UnaryCallSettings.Builder<RunRealtimeReportRequest, RunRealtimeReportResponse>
      runRealtimeReportSettings;
  private final UnaryCallSettings.Builder<CheckCompatibilityRequest, CheckCompatibilityResponse>
      checkCompatibilitySettings;
  // createAudienceExport is a long-running operation: the plain unary builder below covers the
  // initial call (returning Operation); the OperationCallSettings builder covers polling and
  // result/metadata unpacking.
  private final UnaryCallSettings.Builder<CreateAudienceExportRequest, Operation>
      createAudienceExportSettings;
  private final OperationCallSettings.Builder<
          CreateAudienceExportRequest, AudienceExport, AudienceExportMetadata>
      createAudienceExportOperationSettings;
  private final UnaryCallSettings.Builder<QueryAudienceExportRequest, QueryAudienceExportResponse>
      queryAudienceExportSettings;
  private final UnaryCallSettings.Builder<GetAudienceExportRequest, AudienceExport>
      getAudienceExportSettings;
  private final PagedCallSettings.Builder<
          ListAudienceExportsRequest, ListAudienceExportsResponse, ListAudienceExportsPagedResponse>
      listAudienceExportsSettings;
  // Named sets of gRPC status codes considered retryable, keyed by policy name.
  private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>>
      RETRYABLE_CODE_DEFINITIONS;

  static {
    ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions =
        ImmutableMap.builder();
    // "no_retry_1": empty set — no status code triggers a retry.
    definitions.put(
        "no_retry_1_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList()));
    // "retry_policy_0": retry only on UNKNOWN.
    definitions.put(
        "retry_policy_0_codes",
        ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNKNOWN)));
    RETRYABLE_CODE_DEFINITIONS = definitions.build();
  }

  // Named retry/backoff parameter bundles matching the code sets above.
  private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS;

  static {
    ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder();
    RetrySettings settings = null;
    // "no_retry_1": single attempt, 60s per-RPC timeout and 60s total deadline.
    settings =
        RetrySettings.newBuilder()
            .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
            .setRpcTimeoutMultiplier(1.0)
            .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
            .setTotalTimeoutDuration(Duration.ofMillis(60000L))
            .build();
    definitions.put("no_retry_1_params", settings);
    // "retry_policy_0": exponential backoff from 1s (x1.3, capped at 60s),
    // 60s per-attempt timeout, 60s total deadline.
    settings =
        RetrySettings.newBuilder()
            .setInitialRetryDelayDuration(Duration.ofMillis(1000L))
            .setRetryDelayMultiplier(1.3)
            .setMaxRetryDelayDuration(Duration.ofMillis(60000L))
            .setInitialRpcTimeoutDuration(Duration.ofMillis(60000L))
            .setRpcTimeoutMultiplier(1.0)
            .setMaxRpcTimeoutDuration(Duration.ofMillis(60000L))
            .setTotalTimeoutDuration(Duration.ofMillis(60000L))
            .build();
    definitions.put("retry_policy_0_params", settings);
    RETRY_PARAM_DEFINITIONS = definitions.build();
  }

  protected Builder() {
    this(((ClientContext) null));
  }

  // Fresh builder: every per-method builder starts empty, then defaults are layered on.
  protected Builder(ClientContext clientContext) {
    super(clientContext);
    runReportSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    runPivotReportSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    batchRunReportsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    batchRunPivotReportsSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    getMetadataSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    runRealtimeReportSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    checkCompatibilitySettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    createAudienceExportSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    createAudienceExportOperationSettings = OperationCallSettings.newBuilder();
    queryAudienceExportSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    getAudienceExportSettings = UnaryCallSettings.newUnaryCallSettingsBuilder();
    listAudienceExportsSettings =
        PagedCallSettings.newBuilder(LIST_AUDIENCE_EXPORTS_PAGE_STR_FACT);
    unaryMethodSettingsBuilders =
        ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
            runReportSettings,
            runPivotReportSettings,
            batchRunReportsSettings,
            batchRunPivotReportsSettings,
            getMetadataSettings,
            runRealtimeReportSettings,
            checkCompatibilitySettings,
            createAudienceExportSettings,
            queryAudienceExportSettings,
            getAudienceExportSettings,
            listAudienceExportsSettings);
    initDefaults(this);
  }

  // Copy builder: seeds every per-method builder from an existing immutable settings object.
  // Note: initDefaults is intentionally NOT re-applied here — the copied settings win.
  protected Builder(BetaAnalyticsDataStubSettings settings) {
    super(settings);
    runReportSettings = settings.runReportSettings.toBuilder();
    runPivotReportSettings = settings.runPivotReportSettings.toBuilder();
    batchRunReportsSettings = settings.batchRunReportsSettings.toBuilder();
    batchRunPivotReportsSettings = settings.batchRunPivotReportsSettings.toBuilder();
    getMetadataSettings = settings.getMetadataSettings.toBuilder();
    runRealtimeReportSettings = settings.runRealtimeReportSettings.toBuilder();
    checkCompatibilitySettings = settings.checkCompatibilitySettings.toBuilder();
    createAudienceExportSettings = settings.createAudienceExportSettings.toBuilder();
    createAudienceExportOperationSettings =
        settings.createAudienceExportOperationSettings.toBuilder();
    queryAudienceExportSettings = settings.queryAudienceExportSettings.toBuilder();
    getAudienceExportSettings = settings.getAudienceExportSettings.toBuilder();
    listAudienceExportsSettings = settings.listAudienceExportsSettings.toBuilder();
    unaryMethodSettingsBuilders =
        ImmutableList.<UnaryCallSettings.Builder<?, ?>>of(
            runReportSettings,
            runPivotReportSettings,
            batchRunReportsSettings,
            batchRunPivotReportsSettings,
            getMetadataSettings,
            runRealtimeReportSettings,
            checkCompatibilitySettings,
            createAudienceExportSettings,
            queryAudienceExportSettings,
            getAudienceExportSettings,
            listAudienceExportsSettings);
  }

  // Default (gRPC transport) builder used by the enclosing settings class.
  private static Builder createDefault() {
    Builder builder = new Builder(((ClientContext) null));
    builder.setTransportChannelProvider(defaultTransportChannelProvider());
    builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
    builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build());
    builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
    builder.setSwitchToMtlsEndpointAllowed(true);
    return initDefaults(builder);
  }

  // Same as createDefault() but wired for the HTTP/JSON (REST) transport.
  private static Builder createHttpJsonDefault() {
    Builder builder = new Builder(((ClientContext) null));
    builder.setTransportChannelProvider(defaultHttpJsonTransportProviderBuilder().build());
    builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build());
    builder.setInternalHeaderProvider(defaultHttpJsonApiClientHeaderProviderBuilder().build());
    builder.setMtlsEndpoint(getDefaultMtlsEndpoint());
    builder.setSwitchToMtlsEndpointAllowed(true);
    return initDefaults(builder);
  }

  // Applies the service-config defaults: report/metadata RPCs are non-retryable
  // ("no_retry_1"); audience-export RPCs retry on UNKNOWN ("retry_policy_0").
  private static Builder initDefaults(Builder builder) {
    builder
        .runReportSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
    builder
        .runPivotReportSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
    builder
        .batchRunReportsSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
    builder
        .batchRunPivotReportsSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
    builder
        .getMetadataSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
    builder
        .runRealtimeReportSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
    builder
        .checkCompatibilitySettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_1_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_1_params"));
    builder
        .createAudienceExportSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
    builder
        .queryAudienceExportSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
    builder
        .getAudienceExportSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
    builder
        .listAudienceExportsSettings()
        .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
        .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"));
    // Long-running createAudienceExport: initial call retries like retry_policy_0; the
    // operation result/metadata are unpacked from Any; polling backs off 5s -> 45s (x1.5)
    // with a 5-minute overall deadline (Duration.ZERO = no per-poll RPC timeout override).
    builder
        .createAudienceExportOperationSettings()
        .setInitialCallSettings(
            UnaryCallSettings
                .<CreateAudienceExportRequest, OperationSnapshot>newUnaryCallSettingsBuilder()
                .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_0_codes"))
                .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_0_params"))
                .build())
        .setResponseTransformer(
            ProtoOperationTransformers.ResponseTransformer.create(AudienceExport.class))
        .setMetadataTransformer(
            ProtoOperationTransformers.MetadataTransformer.create(AudienceExportMetadata.class))
        .setPollingAlgorithm(
            OperationTimedPollAlgorithm.create(
                RetrySettings.newBuilder()
                    .setInitialRetryDelayDuration(Duration.ofMillis(5000L))
                    .setRetryDelayMultiplier(1.5)
                    .setMaxRetryDelayDuration(Duration.ofMillis(45000L))
                    .setInitialRpcTimeoutDuration(Duration.ZERO)
                    .setRpcTimeoutMultiplier(1.0)
                    .setMaxRpcTimeoutDuration(Duration.ZERO)
                    .setTotalTimeoutDuration(Duration.ofMillis(300000L))
                    .build()));
    return builder;
  }

  /**
   * Applies the given settings updater function to all of the unary API methods in this service.
   *
   * <p>Note: This method does not support applying settings to streaming methods.
   */
  public Builder applyToAllUnaryMethods(
      ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) {
    super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater);
    return this;
  }

  /** Returns the live list of unary method builders (mutations affect this Builder). */
  public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() {
    return unaryMethodSettingsBuilders;
  }

  /** Returns the builder for the settings used for calls to runReport. */
  public UnaryCallSettings.Builder<RunReportRequest, RunReportResponse> runReportSettings() {
    return runReportSettings;
  }

  /** Returns the builder for the settings used for calls to runPivotReport. */
  public UnaryCallSettings.Builder<RunPivotReportRequest, RunPivotReportResponse>
      runPivotReportSettings() {
    return runPivotReportSettings;
  }

  /** Returns the builder for the settings used for calls to batchRunReports. */
  public UnaryCallSettings.Builder<BatchRunReportsRequest, BatchRunReportsResponse>
      batchRunReportsSettings() {
    return batchRunReportsSettings;
  }

  /** Returns the builder for the settings used for calls to batchRunPivotReports. */
  public UnaryCallSettings.Builder<BatchRunPivotReportsRequest, BatchRunPivotReportsResponse>
      batchRunPivotReportsSettings() {
    return batchRunPivotReportsSettings;
  }

  /** Returns the builder for the settings used for calls to getMetadata. */
  public UnaryCallSettings.Builder<GetMetadataRequest, Metadata> getMetadataSettings() {
    return getMetadataSettings;
  }

  /** Returns the builder for the settings used for calls to runRealtimeReport. */
  public UnaryCallSettings.Builder<RunRealtimeReportRequest, RunRealtimeReportResponse>
      runRealtimeReportSettings() {
    return runRealtimeReportSettings;
  }

  /** Returns the builder for the settings used for calls to checkCompatibility. */
  public UnaryCallSettings.Builder<CheckCompatibilityRequest, CheckCompatibilityResponse>
      checkCompatibilitySettings() {
    return checkCompatibilitySettings;
  }

  /** Returns the builder for the settings used for calls to createAudienceExport. */
  public UnaryCallSettings.Builder<CreateAudienceExportRequest, Operation>
      createAudienceExportSettings() {
    return createAudienceExportSettings;
  }

  /** Returns the builder for the settings used for calls to createAudienceExport. */
  public OperationCallSettings.Builder<
          CreateAudienceExportRequest, AudienceExport, AudienceExportMetadata>
      createAudienceExportOperationSettings() {
    return createAudienceExportOperationSettings;
  }

  /** Returns the builder for the settings used for calls to queryAudienceExport. */
  public UnaryCallSettings.Builder<QueryAudienceExportRequest, QueryAudienceExportResponse>
      queryAudienceExportSettings() {
    return queryAudienceExportSettings;
  }

  /** Returns the builder for the settings used for calls to getAudienceExport. */
  public UnaryCallSettings.Builder<GetAudienceExportRequest, AudienceExport>
      getAudienceExportSettings() {
    return getAudienceExportSettings;
  }

  /** Returns the builder for the settings used for calls to listAudienceExports. */
  public PagedCallSettings.Builder<
          ListAudienceExportsRequest, ListAudienceExportsResponse, ListAudienceExportsPagedResponse>
      listAudienceExportsSettings() {
    return listAudienceExportsSettings;
  }

  @Override
  public BetaAnalyticsDataStubSettings build() throws IOException {
    return new BetaAnalyticsDataStubSettings(this);
  }
}
}
apache/hive
35,916
standalone-metastore/metastore-common/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/metastore/api/GetPartitionNamesPsRequest.java
/** * Autogenerated by Thrift Compiler (0.16.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hive.metastore.api; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)") @org.apache.hadoop.classification.InterfaceAudience.Public @org.apache.hadoop.classification.InterfaceStability.Stable public class GetPartitionNamesPsRequest implements org.apache.thrift.TBase<GetPartitionNamesPsRequest, GetPartitionNamesPsRequest._Fields>, java.io.Serializable, Cloneable, Comparable<GetPartitionNamesPsRequest> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("GetPartitionNamesPsRequest"); private static final org.apache.thrift.protocol.TField CAT_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("catName", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField DB_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("dbName", org.apache.thrift.protocol.TType.STRING, (short)2); private static final org.apache.thrift.protocol.TField TBL_NAME_FIELD_DESC = new org.apache.thrift.protocol.TField("tblName", org.apache.thrift.protocol.TType.STRING, (short)3); private static final org.apache.thrift.protocol.TField PART_VALUES_FIELD_DESC = new org.apache.thrift.protocol.TField("partValues", org.apache.thrift.protocol.TType.LIST, (short)4); private static final org.apache.thrift.protocol.TField MAX_PARTS_FIELD_DESC = new org.apache.thrift.protocol.TField("maxParts", org.apache.thrift.protocol.TType.I16, (short)5); private static final org.apache.thrift.protocol.TField VALID_WRITE_ID_LIST_FIELD_DESC = new org.apache.thrift.protocol.TField("validWriteIdList", org.apache.thrift.protocol.TType.STRING, (short)6); private static final org.apache.thrift.protocol.TField ID_FIELD_DESC = new 
org.apache.thrift.protocol.TField("id", org.apache.thrift.protocol.TType.I64, (short)7); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new GetPartitionNamesPsRequestStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new GetPartitionNamesPsRequestTupleSchemeFactory(); private @org.apache.thrift.annotation.Nullable java.lang.String catName; // optional private @org.apache.thrift.annotation.Nullable java.lang.String dbName; // required private @org.apache.thrift.annotation.Nullable java.lang.String tblName; // required private @org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> partValues; // optional private short maxParts; // optional private @org.apache.thrift.annotation.Nullable java.lang.String validWriteIdList; // optional private long id; // optional /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { CAT_NAME((short)1, "catName"), DB_NAME((short)2, "dbName"), TBL_NAME((short)3, "tblName"), PART_VALUES((short)4, "partValues"), MAX_PARTS((short)5, "maxParts"), VALID_WRITE_ID_LIST((short)6, "validWriteIdList"), ID((short)7, "id"); private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. 
*/ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // CAT_NAME return CAT_NAME; case 2: // DB_NAME return DB_NAME; case 3: // TBL_NAME return TBL_NAME; case 4: // PART_VALUES return PART_VALUES; case 5: // MAX_PARTS return MAX_PARTS; case 6: // VALID_WRITE_ID_LIST return VALID_WRITE_ID_LIST; case 7: // ID return ID; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. */ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments private static final int __MAXPARTS_ISSET_ID = 0; private static final int __ID_ISSET_ID = 1; private byte __isset_bitfield = 0; private static final _Fields optionals[] = {_Fields.CAT_NAME,_Fields.PART_VALUES,_Fields.MAX_PARTS,_Fields.VALID_WRITE_ID_LIST,_Fields.ID}; public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.CAT_NAME, new org.apache.thrift.meta_data.FieldMetaData("catName", org.apache.thrift.TFieldRequirementType.OPTIONAL, new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.DB_NAME, new org.apache.thrift.meta_data.FieldMetaData("dbName", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.TBL_NAME, new org.apache.thrift.meta_data.FieldMetaData("tblName", org.apache.thrift.TFieldRequirementType.REQUIRED, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.PART_VALUES, new org.apache.thrift.meta_data.FieldMetaData("partValues", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.ListMetaData(org.apache.thrift.protocol.TType.LIST, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)))); tmpMap.put(_Fields.MAX_PARTS, new org.apache.thrift.meta_data.FieldMetaData("maxParts", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I16))); tmpMap.put(_Fields.VALID_WRITE_ID_LIST, new org.apache.thrift.meta_data.FieldMetaData("validWriteIdList", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.ID, new org.apache.thrift.meta_data.FieldMetaData("id", org.apache.thrift.TFieldRequirementType.OPTIONAL, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(GetPartitionNamesPsRequest.class, metaDataMap); } public GetPartitionNamesPsRequest() { this.maxParts = (short)-1; this.id = -1L; } public GetPartitionNamesPsRequest( java.lang.String dbName, java.lang.String tblName) { this(); this.dbName = dbName; this.tblName = tblName; } /** * Performs a deep copy 
on <i>other</i>. */ public GetPartitionNamesPsRequest(GetPartitionNamesPsRequest other) { __isset_bitfield = other.__isset_bitfield; if (other.isSetCatName()) { this.catName = other.catName; } if (other.isSetDbName()) { this.dbName = other.dbName; } if (other.isSetTblName()) { this.tblName = other.tblName; } if (other.isSetPartValues()) { java.util.List<java.lang.String> __this__partValues = new java.util.ArrayList<java.lang.String>(other.partValues); this.partValues = __this__partValues; } this.maxParts = other.maxParts; if (other.isSetValidWriteIdList()) { this.validWriteIdList = other.validWriteIdList; } this.id = other.id; } public GetPartitionNamesPsRequest deepCopy() { return new GetPartitionNamesPsRequest(this); } @Override public void clear() { this.catName = null; this.dbName = null; this.tblName = null; this.partValues = null; this.maxParts = (short)-1; this.validWriteIdList = null; this.id = -1L; } @org.apache.thrift.annotation.Nullable public java.lang.String getCatName() { return this.catName; } public void setCatName(@org.apache.thrift.annotation.Nullable java.lang.String catName) { this.catName = catName; } public void unsetCatName() { this.catName = null; } /** Returns true if field catName is set (has been assigned a value) and false otherwise */ public boolean isSetCatName() { return this.catName != null; } public void setCatNameIsSet(boolean value) { if (!value) { this.catName = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String getDbName() { return this.dbName; } public void setDbName(@org.apache.thrift.annotation.Nullable java.lang.String dbName) { this.dbName = dbName; } public void unsetDbName() { this.dbName = null; } /** Returns true if field dbName is set (has been assigned a value) and false otherwise */ public boolean isSetDbName() { return this.dbName != null; } public void setDbNameIsSet(boolean value) { if (!value) { this.dbName = null; } } @org.apache.thrift.annotation.Nullable public java.lang.String 
getTblName() { return this.tblName; } public void setTblName(@org.apache.thrift.annotation.Nullable java.lang.String tblName) { this.tblName = tblName; } public void unsetTblName() { this.tblName = null; } /** Returns true if field tblName is set (has been assigned a value) and false otherwise */ public boolean isSetTblName() { return this.tblName != null; } public void setTblNameIsSet(boolean value) { if (!value) { this.tblName = null; } } public int getPartValuesSize() { return (this.partValues == null) ? 0 : this.partValues.size(); } @org.apache.thrift.annotation.Nullable public java.util.Iterator<java.lang.String> getPartValuesIterator() { return (this.partValues == null) ? null : this.partValues.iterator(); } public void addToPartValues(java.lang.String elem) { if (this.partValues == null) { this.partValues = new java.util.ArrayList<java.lang.String>(); } this.partValues.add(elem); } @org.apache.thrift.annotation.Nullable public java.util.List<java.lang.String> getPartValues() { return this.partValues; } public void setPartValues(@org.apache.thrift.annotation.Nullable java.util.List<java.lang.String> partValues) { this.partValues = partValues; } public void unsetPartValues() { this.partValues = null; } /** Returns true if field partValues is set (has been assigned a value) and false otherwise */ public boolean isSetPartValues() { return this.partValues != null; } public void setPartValuesIsSet(boolean value) { if (!value) { this.partValues = null; } } public short getMaxParts() { return this.maxParts; } public void setMaxParts(short maxParts) { this.maxParts = maxParts; setMaxPartsIsSet(true); } public void unsetMaxParts() { __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __MAXPARTS_ISSET_ID); } /** Returns true if field maxParts is set (has been assigned a value) and false otherwise */ public boolean isSetMaxParts() { return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __MAXPARTS_ISSET_ID); } public void 
setMaxPartsIsSet(boolean value) { __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __MAXPARTS_ISSET_ID, value); } @org.apache.thrift.annotation.Nullable public java.lang.String getValidWriteIdList() { return this.validWriteIdList; } public void setValidWriteIdList(@org.apache.thrift.annotation.Nullable java.lang.String validWriteIdList) { this.validWriteIdList = validWriteIdList; } public void unsetValidWriteIdList() { this.validWriteIdList = null; } /** Returns true if field validWriteIdList is set (has been assigned a value) and false otherwise */ public boolean isSetValidWriteIdList() { return this.validWriteIdList != null; } public void setValidWriteIdListIsSet(boolean value) { if (!value) { this.validWriteIdList = null; } } public long getId() { return this.id; } public void setId(long id) { this.id = id; setIdIsSet(true); } public void unsetId() { __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __ID_ISSET_ID); } /** Returns true if field id is set (has been assigned a value) and false otherwise */ public boolean isSetId() { return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __ID_ISSET_ID); } public void setIdIsSet(boolean value) { __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __ID_ISSET_ID, value); } public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case CAT_NAME: if (value == null) { unsetCatName(); } else { setCatName((java.lang.String)value); } break; case DB_NAME: if (value == null) { unsetDbName(); } else { setDbName((java.lang.String)value); } break; case TBL_NAME: if (value == null) { unsetTblName(); } else { setTblName((java.lang.String)value); } break; case PART_VALUES: if (value == null) { unsetPartValues(); } else { setPartValues((java.util.List<java.lang.String>)value); } break; case MAX_PARTS: if (value == null) { unsetMaxParts(); } else { setMaxParts((java.lang.Short)value); 
} break; case VALID_WRITE_ID_LIST: if (value == null) { unsetValidWriteIdList(); } else { setValidWriteIdList((java.lang.String)value); } break; case ID: if (value == null) { unsetId(); } else { setId((java.lang.Long)value); } break; } } @org.apache.thrift.annotation.Nullable public java.lang.Object getFieldValue(_Fields field) { switch (field) { case CAT_NAME: return getCatName(); case DB_NAME: return getDbName(); case TBL_NAME: return getTblName(); case PART_VALUES: return getPartValues(); case MAX_PARTS: return getMaxParts(); case VALID_WRITE_ID_LIST: return getValidWriteIdList(); case ID: return getId(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case CAT_NAME: return isSetCatName(); case DB_NAME: return isSetDbName(); case TBL_NAME: return isSetTblName(); case PART_VALUES: return isSetPartValues(); case MAX_PARTS: return isSetMaxParts(); case VALID_WRITE_ID_LIST: return isSetValidWriteIdList(); case ID: return isSetId(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof GetPartitionNamesPsRequest) return this.equals((GetPartitionNamesPsRequest)that); return false; } public boolean equals(GetPartitionNamesPsRequest that) { if (that == null) return false; if (this == that) return true; boolean this_present_catName = true && this.isSetCatName(); boolean that_present_catName = true && that.isSetCatName(); if (this_present_catName || that_present_catName) { if (!(this_present_catName && that_present_catName)) return false; if (!this.catName.equals(that.catName)) return false; } boolean this_present_dbName = true && this.isSetDbName(); boolean that_present_dbName = true && that.isSetDbName(); if (this_present_dbName || that_present_dbName) { if 
(!(this_present_dbName && that_present_dbName)) return false; if (!this.dbName.equals(that.dbName)) return false; } boolean this_present_tblName = true && this.isSetTblName(); boolean that_present_tblName = true && that.isSetTblName(); if (this_present_tblName || that_present_tblName) { if (!(this_present_tblName && that_present_tblName)) return false; if (!this.tblName.equals(that.tblName)) return false; } boolean this_present_partValues = true && this.isSetPartValues(); boolean that_present_partValues = true && that.isSetPartValues(); if (this_present_partValues || that_present_partValues) { if (!(this_present_partValues && that_present_partValues)) return false; if (!this.partValues.equals(that.partValues)) return false; } boolean this_present_maxParts = true && this.isSetMaxParts(); boolean that_present_maxParts = true && that.isSetMaxParts(); if (this_present_maxParts || that_present_maxParts) { if (!(this_present_maxParts && that_present_maxParts)) return false; if (this.maxParts != that.maxParts) return false; } boolean this_present_validWriteIdList = true && this.isSetValidWriteIdList(); boolean that_present_validWriteIdList = true && that.isSetValidWriteIdList(); if (this_present_validWriteIdList || that_present_validWriteIdList) { if (!(this_present_validWriteIdList && that_present_validWriteIdList)) return false; if (!this.validWriteIdList.equals(that.validWriteIdList)) return false; } boolean this_present_id = true && this.isSetId(); boolean that_present_id = true && that.isSetId(); if (this_present_id || that_present_id) { if (!(this_present_id && that_present_id)) return false; if (this.id != that.id) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetCatName()) ? 131071 : 524287); if (isSetCatName()) hashCode = hashCode * 8191 + catName.hashCode(); hashCode = hashCode * 8191 + ((isSetDbName()) ? 
131071 : 524287); if (isSetDbName()) hashCode = hashCode * 8191 + dbName.hashCode(); hashCode = hashCode * 8191 + ((isSetTblName()) ? 131071 : 524287); if (isSetTblName()) hashCode = hashCode * 8191 + tblName.hashCode(); hashCode = hashCode * 8191 + ((isSetPartValues()) ? 131071 : 524287); if (isSetPartValues()) hashCode = hashCode * 8191 + partValues.hashCode(); hashCode = hashCode * 8191 + ((isSetMaxParts()) ? 131071 : 524287); if (isSetMaxParts()) hashCode = hashCode * 8191 + maxParts; hashCode = hashCode * 8191 + ((isSetValidWriteIdList()) ? 131071 : 524287); if (isSetValidWriteIdList()) hashCode = hashCode * 8191 + validWriteIdList.hashCode(); hashCode = hashCode * 8191 + ((isSetId()) ? 131071 : 524287); if (isSetId()) hashCode = hashCode * 8191 + org.apache.thrift.TBaseHelper.hashCode(id); return hashCode; } @Override public int compareTo(GetPartitionNamesPsRequest other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetCatName(), other.isSetCatName()); if (lastComparison != 0) { return lastComparison; } if (isSetCatName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.catName, other.catName); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetDbName(), other.isSetDbName()); if (lastComparison != 0) { return lastComparison; } if (isSetDbName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.dbName, other.dbName); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetTblName(), other.isSetTblName()); if (lastComparison != 0) { return lastComparison; } if (isSetTblName()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.tblName, other.tblName); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetPartValues(), 
other.isSetPartValues()); if (lastComparison != 0) { return lastComparison; } if (isSetPartValues()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.partValues, other.partValues); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetMaxParts(), other.isSetMaxParts()); if (lastComparison != 0) { return lastComparison; } if (isSetMaxParts()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.maxParts, other.maxParts); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetValidWriteIdList(), other.isSetValidWriteIdList()); if (lastComparison != 0) { return lastComparison; } if (isSetValidWriteIdList()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.validWriteIdList, other.validWriteIdList); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetId(), other.isSetId()); if (lastComparison != 0) { return lastComparison; } if (isSetId()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.id, other.id); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("GetPartitionNamesPsRequest("); boolean first = true; if (isSetCatName()) { sb.append("catName:"); if (this.catName == null) { sb.append("null"); } else { sb.append(this.catName); } first = false; } if (!first) sb.append(", "); sb.append("dbName:"); if (this.dbName == null) { sb.append("null"); } else { 
sb.append(this.dbName); } first = false; if (!first) sb.append(", "); sb.append("tblName:"); if (this.tblName == null) { sb.append("null"); } else { sb.append(this.tblName); } first = false; if (isSetPartValues()) { if (!first) sb.append(", "); sb.append("partValues:"); if (this.partValues == null) { sb.append("null"); } else { sb.append(this.partValues); } first = false; } if (isSetMaxParts()) { if (!first) sb.append(", "); sb.append("maxParts:"); sb.append(this.maxParts); first = false; } if (isSetValidWriteIdList()) { if (!first) sb.append(", "); sb.append("validWriteIdList:"); if (this.validWriteIdList == null) { sb.append("null"); } else { sb.append(this.validWriteIdList); } first = false; } if (isSetId()) { if (!first) sb.append(", "); sb.append("id:"); sb.append(this.id); first = false; } sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields if (!isSetDbName()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'dbName' is unset! Struct:" + toString()); } if (!isSetTblName()) { throw new org.apache.thrift.protocol.TProtocolException("Required field 'tblName' is unset! Struct:" + toString()); } // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. 
__isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class GetPartitionNamesPsRequestStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public GetPartitionNamesPsRequestStandardScheme getScheme() { return new GetPartitionNamesPsRequestStandardScheme(); } } private static class GetPartitionNamesPsRequestStandardScheme extends org.apache.thrift.scheme.StandardScheme<GetPartitionNamesPsRequest> { public void read(org.apache.thrift.protocol.TProtocol iprot, GetPartitionNamesPsRequest struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // CAT_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.catName = iprot.readString(); struct.setCatNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // DB_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.dbName = iprot.readString(); struct.setDbNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // TBL_NAME if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.tblName = iprot.readString(); struct.setTblNameIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // PART_VALUES if (schemeField.type == org.apache.thrift.protocol.TType.LIST) { { org.apache.thrift.protocol.TList _list1536 = iprot.readListBegin(); struct.partValues = new java.util.ArrayList<java.lang.String>(_list1536.size); @org.apache.thrift.annotation.Nullable java.lang.String _elem1537; for (int 
_i1538 = 0; _i1538 < _list1536.size; ++_i1538) { _elem1537 = iprot.readString(); struct.partValues.add(_elem1537); } iprot.readListEnd(); } struct.setPartValuesIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // MAX_PARTS if (schemeField.type == org.apache.thrift.protocol.TType.I16) { struct.maxParts = iprot.readI16(); struct.setMaxPartsIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 6: // VALID_WRITE_ID_LIST if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.validWriteIdList = iprot.readString(); struct.setValidWriteIdListIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 7: // ID if (schemeField.type == org.apache.thrift.protocol.TType.I64) { struct.id = iprot.readI64(); struct.setIdIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, GetPartitionNamesPsRequest struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.catName != null) { if (struct.isSetCatName()) { oprot.writeFieldBegin(CAT_NAME_FIELD_DESC); oprot.writeString(struct.catName); oprot.writeFieldEnd(); } } if (struct.dbName != null) { oprot.writeFieldBegin(DB_NAME_FIELD_DESC); oprot.writeString(struct.dbName); oprot.writeFieldEnd(); } if (struct.tblName != null) { oprot.writeFieldBegin(TBL_NAME_FIELD_DESC); oprot.writeString(struct.tblName); oprot.writeFieldEnd(); } if (struct.partValues != null) { if (struct.isSetPartValues()) { oprot.writeFieldBegin(PART_VALUES_FIELD_DESC); { oprot.writeListBegin(new org.apache.thrift.protocol.TList(org.apache.thrift.protocol.TType.STRING, 
struct.partValues.size())); for (java.lang.String _iter1539 : struct.partValues) { oprot.writeString(_iter1539); } oprot.writeListEnd(); } oprot.writeFieldEnd(); } } if (struct.isSetMaxParts()) { oprot.writeFieldBegin(MAX_PARTS_FIELD_DESC); oprot.writeI16(struct.maxParts); oprot.writeFieldEnd(); } if (struct.validWriteIdList != null) { if (struct.isSetValidWriteIdList()) { oprot.writeFieldBegin(VALID_WRITE_ID_LIST_FIELD_DESC); oprot.writeString(struct.validWriteIdList); oprot.writeFieldEnd(); } } if (struct.isSetId()) { oprot.writeFieldBegin(ID_FIELD_DESC); oprot.writeI64(struct.id); oprot.writeFieldEnd(); } oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class GetPartitionNamesPsRequestTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public GetPartitionNamesPsRequestTupleScheme getScheme() { return new GetPartitionNamesPsRequestTupleScheme(); } } private static class GetPartitionNamesPsRequestTupleScheme extends org.apache.thrift.scheme.TupleScheme<GetPartitionNamesPsRequest> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, GetPartitionNamesPsRequest struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; oprot.writeString(struct.dbName); oprot.writeString(struct.tblName); java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetCatName()) { optionals.set(0); } if (struct.isSetPartValues()) { optionals.set(1); } if (struct.isSetMaxParts()) { optionals.set(2); } if (struct.isSetValidWriteIdList()) { optionals.set(3); } if (struct.isSetId()) { optionals.set(4); } oprot.writeBitSet(optionals, 5); if (struct.isSetCatName()) { oprot.writeString(struct.catName); } if (struct.isSetPartValues()) { { oprot.writeI32(struct.partValues.size()); for (java.lang.String _iter1540 : struct.partValues) { oprot.writeString(_iter1540); } } } if (struct.isSetMaxParts()) { oprot.writeI16(struct.maxParts); } if 
(struct.isSetValidWriteIdList()) { oprot.writeString(struct.validWriteIdList); } if (struct.isSetId()) { oprot.writeI64(struct.id); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, GetPartitionNamesPsRequest struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; struct.dbName = iprot.readString(); struct.setDbNameIsSet(true); struct.tblName = iprot.readString(); struct.setTblNameIsSet(true); java.util.BitSet incoming = iprot.readBitSet(5); if (incoming.get(0)) { struct.catName = iprot.readString(); struct.setCatNameIsSet(true); } if (incoming.get(1)) { { org.apache.thrift.protocol.TList _list1541 = iprot.readListBegin(org.apache.thrift.protocol.TType.STRING); struct.partValues = new java.util.ArrayList<java.lang.String>(_list1541.size); @org.apache.thrift.annotation.Nullable java.lang.String _elem1542; for (int _i1543 = 0; _i1543 < _list1541.size; ++_i1543) { _elem1542 = iprot.readString(); struct.partValues.add(_elem1542); } } struct.setPartValuesIsSet(true); } if (incoming.get(2)) { struct.maxParts = iprot.readI16(); struct.setMaxPartsIsSet(true); } if (incoming.get(3)) { struct.validWriteIdList = iprot.readString(); struct.setValidWriteIdListIsSet(true); } if (incoming.get(4)) { struct.id = iprot.readI64(); struct.setIdIsSet(true); } } } private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } }
apache/tomcat
35,885
java/org/apache/catalina/core/ContainerBase.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.catalina.core; import java.beans.PropertyChangeListener; import java.beans.PropertyChangeSupport; import java.io.File; import java.util.ArrayList; import java.util.HashMap; import java.util.List; import java.util.concurrent.Callable; import java.util.concurrent.CopyOnWriteArrayList; import java.util.concurrent.ExecutionException; import java.util.concurrent.ExecutorService; import java.util.concurrent.Future; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import java.util.concurrent.locks.Lock; import java.util.concurrent.locks.ReadWriteLock; import java.util.concurrent.locks.ReentrantReadWriteLock; import javax.management.ObjectName; import org.apache.catalina.AccessLog; import org.apache.catalina.Cluster; import org.apache.catalina.Container; import org.apache.catalina.ContainerEvent; import org.apache.catalina.ContainerListener; import org.apache.catalina.Context; import org.apache.catalina.Engine; import org.apache.catalina.Host; import org.apache.catalina.Lifecycle; import org.apache.catalina.LifecycleException; import org.apache.catalina.LifecycleState; import org.apache.catalina.Loader; import org.apache.catalina.Pipeline; import 
org.apache.catalina.Realm; import org.apache.catalina.Server; import org.apache.catalina.Valve; import org.apache.catalina.Wrapper; import org.apache.catalina.connector.Request; import org.apache.catalina.connector.Response; import org.apache.catalina.util.ContextName; import org.apache.catalina.util.LifecycleMBeanBase; import org.apache.juli.logging.Log; import org.apache.juli.logging.LogFactory; import org.apache.tomcat.util.ExceptionUtils; import org.apache.tomcat.util.MultiThrowable; import org.apache.tomcat.util.res.StringManager; import org.apache.tomcat.util.threads.InlineExecutorService; /** * Abstract implementation of the <b>Container</b> interface, providing common functionality required by nearly every * implementation. Classes extending this base class must may implement a replacement for <code>invoke()</code>. * <p> * All subclasses of this abstract base class will include support for a Pipeline object that defines the processing to * be performed for each request received by the <code>invoke()</code> method of this class, utilizing the "Chain of * Responsibility" design pattern. A subclass should encapsulate its own processing functionality as a * <code>Valve</code>, and configure this Valve into the pipeline by calling <code>setBasic()</code>. * <p> * This implementation fires property change events, per the JavaBeans design pattern, for changes in singleton * properties. 
In addition, it fires the following <code>ContainerEvent</code> events to listeners who register * themselves with <code>addContainerListener()</code>: * <table border=1> * <caption>ContainerEvents fired by this implementation</caption> * <tr> * <th>Type</th> * <th>Data</th> * <th>Description</th> * </tr> * <tr> * <td><code>addChild</code></td> * <td><code>Container</code></td> * <td>Child container added to this Container.</td> * </tr> * <tr> * <td><code>{@link #getPipeline() pipeline}.addValve</code></td> * <td><code>Valve</code></td> * <td>Valve added to this Container.</td> * </tr> * <tr> * <td><code>removeChild</code></td> * <td><code>Container</code></td> * <td>Child container removed from this Container.</td> * </tr> * <tr> * <td><code>{@link #getPipeline() pipeline}.removeValve</code></td> * <td><code>Valve</code></td> * <td>Valve removed from this Container.</td> * </tr> * <tr> * <td><code>start</code></td> * <td><code>null</code></td> * <td>Container was started.</td> * </tr> * <tr> * <td><code>stop</code></td> * <td><code>null</code></td> * <td>Container was stopped.</td> * </tr> * </table> * Subclasses that fire additional events should document them in the class comments of the implementation class. */ public abstract class ContainerBase extends LifecycleMBeanBase implements Container { private static final Log log = LogFactory.getLog(ContainerBase.class); // ----------------------------------------------------- Instance Variables /** * The child Containers belonging to this Container, keyed by name. */ protected final HashMap<String,Container> children = new HashMap<>(); private final ReadWriteLock childrenLock = new ReentrantReadWriteLock(); /** * The processor delay for this component. */ protected int backgroundProcessorDelay = -1; /** * The future allowing control of the background processor. 
*/ protected ScheduledFuture<?> backgroundProcessorFuture; protected ScheduledFuture<?> monitorFuture; /** * The container event listeners for this Container. Implemented as a CopyOnWriteArrayList since listeners may * invoke methods to add/remove themselves or other listeners and with a ReadWriteLock that would trigger a * deadlock. */ protected final List<ContainerListener> listeners = new CopyOnWriteArrayList<>(); /** * The Logger implementation with which this Container is associated. */ protected Log logger = null; /** * Associated logger name. */ protected String logName = null; /** * The cluster with which this Container is associated. */ protected Cluster cluster = null; private final ReadWriteLock clusterLock = new ReentrantReadWriteLock(); /** * The human-readable name of this Container. */ protected String name = null; /** * The parent Container to which this Container is a child. */ protected Container parent = null; /** * The parent class loader to be configured when we install a Loader. */ protected ClassLoader parentClassLoader = null; /** * The Pipeline object with which this Container is associated. */ protected final Pipeline pipeline = new StandardPipeline(this); /** * The Realm with which this Container is associated. */ private volatile Realm realm = null; /** * Lock used to control access to the Realm. */ private final ReadWriteLock realmLock = new ReentrantReadWriteLock(); /** * The string manager for this package. */ protected static final StringManager sm = StringManager.getManager(ContainerBase.class); /** * Will children be started automatically when they are added. */ protected boolean startChildren = true; /** * The property change support for this component. */ protected final PropertyChangeSupport support = new PropertyChangeSupport(this); /** * The access log to use for requests normally handled by this container that have been handled earlier in the * processing chain. 
*/ protected volatile AccessLog accessLog = null; private volatile boolean accessLogScanComplete = false; /** * The number of threads available to process start and stop events for any children associated with this container. */ private int startStopThreads = 1; protected ExecutorService startStopExecutor; // ------------------------------------------------------------- Properties @Override public int getStartStopThreads() { return startStopThreads; } @Override public void setStartStopThreads(int startStopThreads) { int oldStartStopThreads = this.startStopThreads; this.startStopThreads = startStopThreads; // Use local copies to ensure thread safety if (oldStartStopThreads != startStopThreads && startStopExecutor != null) { reconfigureStartStopExecutor(getStartStopThreads()); } } @Override public int getBackgroundProcessorDelay() { return backgroundProcessorDelay; } @Override public void setBackgroundProcessorDelay(int delay) { backgroundProcessorDelay = delay; } @Override public Log getLogger() { if (logger != null) { return logger; } logger = LogFactory.getLog(getLogName()); return logger; } @Override public String getLogName() { if (logName != null) { return logName; } String loggerName = null; Container current = this; while (current != null) { String name = current.getName(); if ((name == null) || (name.isEmpty())) { name = "/"; } else if (name.startsWith("##")) { name = "/" + name; } loggerName = "[" + name + "]" + ((loggerName != null) ? ("." + loggerName) : ""); current = current.getParent(); } logName = ContainerBase.class.getName() + "." + loggerName; return logName; } @Override public Cluster getCluster() { Lock readLock = clusterLock.readLock(); readLock.lock(); try { if (cluster != null) { return cluster; } if (parent != null) { return parent.getCluster(); } return null; } finally { readLock.unlock(); } } /* * Provide access to just the cluster component attached to this container. 
*/ protected Cluster getClusterInternal() { Lock readLock = clusterLock.readLock(); readLock.lock(); try { return cluster; } finally { readLock.unlock(); } } @Override public void setCluster(Cluster cluster) { Cluster oldCluster; Lock writeLock = clusterLock.writeLock(); writeLock.lock(); try { // Change components if necessary oldCluster = this.cluster; if (oldCluster == cluster) { return; } this.cluster = cluster; // Start the new component if necessary if (cluster != null) { cluster.setContainer(this); } } finally { writeLock.unlock(); } // Stop the old component if necessary if (getState().isAvailable() && (oldCluster instanceof Lifecycle)) { try { ((Lifecycle) oldCluster).stop(); } catch (LifecycleException e) { log.error(sm.getString("containerBase.cluster.stop"), e); } } if (getState().isAvailable() && (cluster instanceof Lifecycle)) { try { ((Lifecycle) cluster).start(); } catch (LifecycleException e) { log.error(sm.getString("containerBase.cluster.start"), e); } } // Report this property change to interested listeners support.firePropertyChange("cluster", oldCluster, cluster); } @Override public String getName() { return name; } @Override public void setName(String name) { if (name == null) { throw new IllegalArgumentException(sm.getString("containerBase.nullName")); } String oldName = this.name; this.name = name; support.firePropertyChange("name", oldName, this.name); } /** * Return if children of this container will be started automatically when they are added to this container. * * @return <code>true</code> if the children will be started */ public boolean getStartChildren() { return startChildren; } /** * Set if children of this container will be started automatically when they are added to this container. 
* * @param startChildren New value of the startChildren flag */ public void setStartChildren(boolean startChildren) { boolean oldStartChildren = this.startChildren; this.startChildren = startChildren; support.firePropertyChange("startChildren", oldStartChildren, this.startChildren); } @Override public Container getParent() { return parent; } @Override public void setParent(Container container) { Container oldParent = this.parent; this.parent = container; support.firePropertyChange("parent", oldParent, this.parent); } @Override public ClassLoader getParentClassLoader() { if (parentClassLoader != null) { return parentClassLoader; } if (parent != null) { return parent.getParentClassLoader(); } return ClassLoader.getSystemClassLoader(); } @Override public void setParentClassLoader(ClassLoader parent) { ClassLoader oldParentClassLoader = this.parentClassLoader; this.parentClassLoader = parent; support.firePropertyChange("parentClassLoader", oldParentClassLoader, this.parentClassLoader); } @Override public Pipeline getPipeline() { return this.pipeline; } @Override public Realm getRealm() { Lock l = realmLock.readLock(); l.lock(); try { if (realm != null) { return realm; } if (parent != null) { return parent.getRealm(); } return null; } finally { l.unlock(); } } protected Realm getRealmInternal() { Lock l = realmLock.readLock(); l.lock(); try { return realm; } finally { l.unlock(); } } @Override public void setRealm(Realm realm) { Realm oldRealm; Lock l = realmLock.writeLock(); l.lock(); try { // Change components if necessary oldRealm = this.realm; if (oldRealm == realm) { return; } this.realm = realm; // Start the new component if necessary if (realm != null) { realm.setContainer(this); } } finally { l.unlock(); } // Stop the old component if necessary if (getState().isAvailable() && oldRealm instanceof Lifecycle) { try { ((Lifecycle) oldRealm).stop(); } catch (LifecycleException e) { log.error(sm.getString("containerBase.realm.stop"), e); } } if 
(getState().isAvailable() && realm instanceof Lifecycle) { try { ((Lifecycle) realm).start(); } catch (LifecycleException e) { log.error(sm.getString("containerBase.realm.start"), e); } } // Report this property change to interested listeners support.firePropertyChange("realm", oldRealm, this.realm); } // ------------------------------------------------------ Container Methods @Override public void addChild(Container child) { if (log.isDebugEnabled()) { log.debug(sm.getString("containerBase.child.add", child, this)); } childrenLock.writeLock().lock(); try { if (children.get(child.getName()) != null) { throw new IllegalArgumentException(sm.getString("containerBase.child.notUnique", child.getName())); } child.setParent(this); // May throw IAE children.put(child.getName(), child); } finally { childrenLock.writeLock().unlock(); } fireContainerEvent(ADD_CHILD_EVENT, child); // Start child // Don't do this inside sync block - start can be a slow process and // locking the children object can cause problems elsewhere try { if ((getState().isAvailable() || LifecycleState.STARTING_PREP.equals(getState())) && startChildren) { child.start(); } } catch (LifecycleException e) { throw new IllegalStateException(sm.getString("containerBase.child.start"), e); } } @Override public void addContainerListener(ContainerListener listener) { listeners.add(listener); } @Override public void addPropertyChangeListener(PropertyChangeListener listener) { support.addPropertyChangeListener(listener); } @Override public Container findChild(String name) { if (name == null) { return null; } childrenLock.readLock().lock(); try { return children.get(name); } finally { childrenLock.readLock().unlock(); } } @Override public Container[] findChildren() { childrenLock.readLock().lock(); try { return children.values().toArray(new Container[0]); } finally { childrenLock.readLock().unlock(); } } @Override public ContainerListener[] findContainerListeners() { return listeners.toArray(new 
ContainerListener[0]); } @Override public void removeChild(Container child) { if (child == null) { return; } try { if (child.getState().isAvailable()) { child.stop(); } } catch (LifecycleException e) { log.error(sm.getString("containerBase.child.stop"), e); } boolean destroy = false; try { // child.destroy() may have already been called which would have // triggered this call. If that is the case, no need to destroy the // child again. if (!LifecycleState.DESTROYING.equals(child.getState())) { child.destroy(); destroy = true; } } catch (LifecycleException e) { log.error(sm.getString("containerBase.child.destroy"), e); } if (!destroy) { fireContainerEvent(REMOVE_CHILD_EVENT, child); } childrenLock.writeLock().lock(); try { children.remove(child.getName()); } finally { childrenLock.writeLock().unlock(); } } @Override public void removeContainerListener(ContainerListener listener) { listeners.remove(listener); } @Override public void removePropertyChangeListener(PropertyChangeListener listener) { support.removePropertyChangeListener(listener); } private void reconfigureStartStopExecutor(int threads) { if (threads == 1) { // Use a fake executor if (!(startStopExecutor instanceof InlineExecutorService)) { startStopExecutor = new InlineExecutorService(); } } else { // Delegate utility execution to the Service Server server = Container.getService(this).getServer(); server.setUtilityThreads(threads); startStopExecutor = server.getUtilityExecutor(); } } /** * Start this component and implement the requirements of * {@link org.apache.catalina.util.LifecycleBase#startInternal()}. 
* * @exception LifecycleException if this component detects a fatal error that prevents this component from being * used */ @Override protected void startInternal() throws LifecycleException { reconfigureStartStopExecutor(getStartStopThreads()); // Start our subordinate components, if any logger = null; getLogger(); Cluster cluster = getClusterInternal(); if (cluster instanceof Lifecycle) { ((Lifecycle) cluster).start(); } Realm realm = getRealmInternal(); if (realm instanceof Lifecycle) { ((Lifecycle) realm).start(); } // Start our child containers, if any Container[] children = findChildren(); List<Future<Void>> results = new ArrayList<>(children.length); for (Container child : children) { results.add(startStopExecutor.submit(new StartChild(child))); } MultiThrowable multiThrowable = null; for (Future<Void> result : results) { try { result.get(); } catch (Throwable t) { log.error(sm.getString("containerBase.threadedStartFailed"), t); if (multiThrowable == null) { multiThrowable = new MultiThrowable(); } multiThrowable.add(t); } } if (multiThrowable != null) { throw new LifecycleException(sm.getString("containerBase.threadedStartFailed"), multiThrowable.getThrowable()); } // Start the Valves in our pipeline (including the basic), if any if (pipeline instanceof Lifecycle) { ((Lifecycle) pipeline).start(); } setState(LifecycleState.STARTING); // Start our thread if (backgroundProcessorDelay > 0) { monitorFuture = Container.getService(ContainerBase.this).getServer().getUtilityExecutor() .scheduleWithFixedDelay(new ContainerBackgroundProcessorMonitor(), 0, 60, TimeUnit.SECONDS); } } /** * Stop this component and implement the requirements of * {@link org.apache.catalina.util.LifecycleBase#stopInternal()}. 
* * @exception LifecycleException if this component detects a fatal error that prevents this component from being * used */ @Override protected void stopInternal() throws LifecycleException { // Stop our thread if (monitorFuture != null) { monitorFuture.cancel(true); monitorFuture = null; } threadStop(); setState(LifecycleState.STOPPING); // Stop the Valves in our pipeline (including the basic), if any if (pipeline instanceof Lifecycle && ((Lifecycle) pipeline).getState().isAvailable()) { ((Lifecycle) pipeline).stop(); } // Stop our child containers, if any Container[] children = findChildren(); List<Future<Void>> results = new ArrayList<>(children.length); for (Container child : children) { results.add(startStopExecutor.submit(new StopChild(child))); } boolean fail = false; for (Future<Void> result : results) { try { result.get(); } catch (Exception e) { log.error(sm.getString("containerBase.threadedStopFailed"), e); fail = true; } } if (fail) { throw new LifecycleException(sm.getString("containerBase.threadedStopFailed")); } // Stop our subordinate components, if any Realm realm = getRealmInternal(); if (realm instanceof Lifecycle) { ((Lifecycle) realm).stop(); } Cluster cluster = getClusterInternal(); if (cluster instanceof Lifecycle) { ((Lifecycle) cluster).stop(); } // If init fails, this may be null if (startStopExecutor != null) { startStopExecutor.shutdownNow(); startStopExecutor = null; } } @Override protected void destroyInternal() throws LifecycleException { Realm realm = getRealmInternal(); if (realm instanceof Lifecycle) { ((Lifecycle) realm).destroy(); } Cluster cluster = getClusterInternal(); if (cluster instanceof Lifecycle) { ((Lifecycle) cluster).destroy(); } // Stop the Valves in our pipeline (including the basic), if any if (pipeline instanceof Lifecycle) { ((Lifecycle) pipeline).destroy(); } // Remove children now this container is being destroyed for (Container child : findChildren()) { removeChild(child); } // Required if the child is 
destroyed directly. if (parent != null) { parent.removeChild(this); } super.destroyInternal(); } @Override public void logAccess(Request request, Response response, long time, boolean useDefault) { boolean logged = false; if (getAccessLog() != null) { getAccessLog().log(request, response, time); logged = true; } if (getParent() != null) { // No need to use default logger once request/response has been logged // once getParent().logAccess(request, response, time, (useDefault && !logged)); } } @Override public AccessLog getAccessLog() { if (accessLogScanComplete) { return accessLog; } AccessLogAdapter adapter = null; Valve[] valves = getPipeline().getValves(); for (Valve valve : valves) { if (valve instanceof AccessLog) { if (adapter == null) { adapter = new AccessLogAdapter((AccessLog) valve); } else { adapter.add((AccessLog) valve); } } } if (adapter != null) { accessLog = adapter; } accessLogScanComplete = true; return accessLog; } // ------------------------------------------------------- Pipeline Methods /** * Convenience method, intended for use by the digester to simplify the process of adding Valves to containers. See * {@link Pipeline#addValve(Valve)} for full details. Components other than the digester should use * {@link #getPipeline()}.{@link #addValve(Valve)} in case a future implementation provides an alternative method * for the digester to use. 
* * @param valve Valve to be added * * @exception IllegalArgumentException if this Container refused to accept the specified Valve * @exception IllegalArgumentException if the specified Valve refuses to be associated with this Container * @exception IllegalStateException if the specified Valve is already associated with a different Container */ public synchronized void addValve(Valve valve) { pipeline.addValve(valve); } @Override public synchronized void backgroundProcess() { if (!getState().isAvailable()) { return; } Cluster cluster = getClusterInternal(); if (cluster != null) { try { cluster.backgroundProcess(); } catch (Exception e) { log.warn(sm.getString("containerBase.backgroundProcess.cluster", cluster), e); } } Realm realm = getRealmInternal(); if (realm != null) { try { realm.backgroundProcess(); } catch (Exception e) { log.warn(sm.getString("containerBase.backgroundProcess.realm", realm), e); } } Valve current = pipeline.getFirst(); while (current != null) { try { current.backgroundProcess(); } catch (Exception e) { log.warn(sm.getString("containerBase.backgroundProcess.valve", current), e); } current = current.getNext(); } fireLifecycleEvent(PERIODIC_EVENT, null); } @Override public File getCatalinaBase() { if (parent == null) { return null; } return parent.getCatalinaBase(); } @Override public File getCatalinaHome() { if (parent == null) { return null; } return parent.getCatalinaHome(); } // ------------------------------------------------------ Protected Methods @Override public void fireContainerEvent(String type, Object data) { if (listeners.isEmpty()) { return; } ContainerEvent event = new ContainerEvent(this, type, data); // Note for each uses an iterator internally so this is safe for (ContainerListener listener : listeners) { listener.containerEvent(event); } } // -------------------- JMX and Registration -------------------- @Override protected String getDomainInternal() { Container p = this.getParent(); if (p == null) { return null; } else { 
return p.getDomain(); } } @Override public String getMBeanKeyProperties() { Container c = this; StringBuilder keyProperties = new StringBuilder(); int containerCount = 0; // Work up container hierarchy, add a component to the name for // each container while (!(c instanceof Engine)) { if (c instanceof Wrapper) { keyProperties.insert(0, ",servlet="); keyProperties.insert(9, c.getName()); } else if (c instanceof Context) { keyProperties.insert(0, ",context="); ContextName cn = new ContextName(c.getName(), false); keyProperties.insert(9, cn.getDisplayName()); } else if (c instanceof Host) { keyProperties.insert(0, ",host="); keyProperties.insert(6, c.getName()); } else if (c == null) { // May happen in unit testing and/or some embedding scenarios keyProperties.append(",container"); keyProperties.append(containerCount); keyProperties.append("=null"); break; } else { // Should never happen... keyProperties.append(",container"); keyProperties.append(containerCount++); keyProperties.append('='); keyProperties.append(c.getName()); } c = c.getParent(); } return keyProperties.toString(); } public ObjectName[] getChildren() { List<ObjectName> names; childrenLock.readLock().lock(); try { names = new ArrayList<>(children.size()); for (Container next : children.values()) { if (next instanceof ContainerBase) { names.add(next.getObjectName()); } } } finally { childrenLock.readLock().unlock(); } return names.toArray(new ObjectName[0]); } // -------------------- Background Thread -------------------- /** * Start the background thread that will periodically check for session timeouts. 
*/ protected void threadStart() { if (backgroundProcessorDelay > 0 && (getState().isAvailable() || LifecycleState.STARTING_PREP.equals(getState())) && (backgroundProcessorFuture == null || backgroundProcessorFuture.isDone())) { if (backgroundProcessorFuture != null && backgroundProcessorFuture.isDone()) { // There was an error executing the scheduled task, get it and log it try { backgroundProcessorFuture.get(); } catch (InterruptedException | ExecutionException e) { log.error(sm.getString("containerBase.backgroundProcess.error"), e); } } backgroundProcessorFuture = Container.getService(this).getServer().getUtilityExecutor() .scheduleWithFixedDelay(new ContainerBackgroundProcessor(), backgroundProcessorDelay, backgroundProcessorDelay, TimeUnit.SECONDS); } } /** * Stop the background thread that is periodically checking for session timeouts. */ protected void threadStop() { if (backgroundProcessorFuture != null) { backgroundProcessorFuture.cancel(true); backgroundProcessorFuture = null; } } @Override public final String toString() { StringBuilder sb = new StringBuilder(); Container parent = getParent(); if (parent != null) { sb.append(parent); sb.append('.'); } sb.append(this.getClass().getSimpleName()); sb.append('['); sb.append(getName()); sb.append(']'); return sb.toString(); } // ------------------------------- ContainerBackgroundProcessor Inner Class protected class ContainerBackgroundProcessorMonitor implements Runnable { @Override public void run() { if (getState().isAvailable()) { threadStart(); } } } /** * Private runnable class to invoke the backgroundProcess method of this container and its children after a fixed * delay. 
*/ protected class ContainerBackgroundProcessor implements Runnable { @Override public void run() { processChildren(ContainerBase.this); } protected void processChildren(Container container) { ClassLoader originalClassLoader = null; try { if (container instanceof Context) { Loader loader = ((Context) container).getLoader(); // Loader will be null for FailedContext instances if (loader == null) { return; } // Ensure background processing for Contexts and Wrappers // is performed under the web app's class loader originalClassLoader = ((Context) container).bind(null); } container.backgroundProcess(); Container[] children = container.findChildren(); for (Container child : children) { if (child.getBackgroundProcessorDelay() <= 0) { processChildren(child); } } } catch (Throwable t) { ExceptionUtils.handleThrowable(t); log.error(sm.getString("containerBase.backgroundProcess.error"), t); } finally { if (container instanceof Context) { ((Context) container).unbind(originalClassLoader); } } } } // ---------------------------- Inner classes used with start/stop Executor private record StartChild(Container child) implements Callable<Void> { @Override public Void call() throws LifecycleException { child.start(); return null; } } private record StopChild(Container child) implements Callable<Void> { @Override public Void call() throws LifecycleException { if (child.getState().isAvailable()) { child.stop(); } return null; } } }
googleapis/google-cloud-java
35,875
java-securitycenter/proto-google-cloud-securitycenter-v1/src/main/java/com/google/cloud/securitycenter/v1/ListSourcesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/v1/securitycenter_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.v1; /** * * * <pre> * Response message for listing sources. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v1.ListSourcesResponse} */ public final class ListSourcesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v1.ListSourcesResponse) ListSourcesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListSourcesResponse.newBuilder() to construct. 
private ListSourcesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListSourcesResponse() { sources_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListSourcesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ListSourcesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ListSourcesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1.ListSourcesResponse.class, com.google.cloud.securitycenter.v1.ListSourcesResponse.Builder.class); } public static final int SOURCES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.securitycenter.v1.Source> sources_; /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.securitycenter.v1.Source> getSourcesList() { return sources_; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.securitycenter.v1.SourceOrBuilder> getSourcesOrBuilderList() { return sources_; } /** * * * <pre> * Sources belonging to the requested parent. 
* </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ @java.lang.Override public int getSourcesCount() { return sources_.size(); } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ @java.lang.Override public com.google.cloud.securitycenter.v1.Source getSources(int index) { return sources_.get(index); } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ @java.lang.Override public com.google.cloud.securitycenter.v1.SourceOrBuilder getSourcesOrBuilder(int index) { return sources_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < sources_.size(); i++) { output.writeMessage(1, sources_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < sources_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, sources_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycenter.v1.ListSourcesResponse)) { return super.equals(obj); } com.google.cloud.securitycenter.v1.ListSourcesResponse other = (com.google.cloud.securitycenter.v1.ListSourcesResponse) obj; if (!getSourcesList().equals(other.getSourcesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSourcesCount() > 0) { hash = (37 * hash) + SOURCES_FIELD_NUMBER; hash = (53 * hash) + getSourcesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.v1.ListSourcesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.ListSourcesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ListSourcesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.ListSourcesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ListSourcesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v1.ListSourcesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
public static com.google.cloud.securitycenter.v1.ListSourcesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.ListSourcesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ListSourcesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.ListSourcesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v1.ListSourcesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v1.ListSourcesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securitycenter.v1.ListSourcesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder 
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for listing sources. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v1.ListSourcesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v1.ListSourcesResponse) com.google.cloud.securitycenter.v1.ListSourcesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ListSourcesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ListSourcesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v1.ListSourcesResponse.class, com.google.cloud.securitycenter.v1.ListSourcesResponse.Builder.class); } // Construct using com.google.cloud.securitycenter.v1.ListSourcesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (sourcesBuilder_ == null) { sources_ = java.util.Collections.emptyList(); } else { sources_ = null; sourcesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.securitycenter.v1.SecuritycenterService .internal_static_google_cloud_securitycenter_v1_ListSourcesResponse_descriptor; } @java.lang.Override public com.google.cloud.securitycenter.v1.ListSourcesResponse getDefaultInstanceForType() { return com.google.cloud.securitycenter.v1.ListSourcesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycenter.v1.ListSourcesResponse build() { com.google.cloud.securitycenter.v1.ListSourcesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securitycenter.v1.ListSourcesResponse buildPartial() { com.google.cloud.securitycenter.v1.ListSourcesResponse result = new com.google.cloud.securitycenter.v1.ListSourcesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.securitycenter.v1.ListSourcesResponse result) { if (sourcesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { sources_ = java.util.Collections.unmodifiableList(sources_); bitField0_ = (bitField0_ & ~0x00000001); } result.sources_ = sources_; } else { result.sources_ = sourcesBuilder_.build(); } } private void buildPartial0(com.google.cloud.securitycenter.v1.ListSourcesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) 
{ return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycenter.v1.ListSourcesResponse) { return mergeFrom((com.google.cloud.securitycenter.v1.ListSourcesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.securitycenter.v1.ListSourcesResponse other) { if (other == com.google.cloud.securitycenter.v1.ListSourcesResponse.getDefaultInstance()) return this; if (sourcesBuilder_ == null) { if (!other.sources_.isEmpty()) { if (sources_.isEmpty()) { sources_ = other.sources_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureSourcesIsMutable(); sources_.addAll(other.sources_); } onChanged(); } } else { if (!other.sources_.isEmpty()) { if (sourcesBuilder_.isEmpty()) { sourcesBuilder_.dispose(); sourcesBuilder_ = null; sources_ = other.sources_; bitField0_ = (bitField0_ & ~0x00000001); sourcesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getSourcesFieldBuilder() : null; } else { sourcesBuilder_.addAllMessages(other.sources_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.securitycenter.v1.Source m = input.readMessage( com.google.cloud.securitycenter.v1.Source.parser(), extensionRegistry); if (sourcesBuilder_ == null) { ensureSourcesIsMutable(); sources_.add(m); } else { sourcesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.securitycenter.v1.Source> sources_ = java.util.Collections.emptyList(); private void ensureSourcesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { sources_ = new java.util.ArrayList<com.google.cloud.securitycenter.v1.Source>(sources_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securitycenter.v1.Source, com.google.cloud.securitycenter.v1.Source.Builder, com.google.cloud.securitycenter.v1.SourceOrBuilder> 
sourcesBuilder_; /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public java.util.List<com.google.cloud.securitycenter.v1.Source> getSourcesList() { if (sourcesBuilder_ == null) { return java.util.Collections.unmodifiableList(sources_); } else { return sourcesBuilder_.getMessageList(); } } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public int getSourcesCount() { if (sourcesBuilder_ == null) { return sources_.size(); } else { return sourcesBuilder_.getCount(); } } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public com.google.cloud.securitycenter.v1.Source getSources(int index) { if (sourcesBuilder_ == null) { return sources_.get(index); } else { return sourcesBuilder_.getMessage(index); } } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public Builder setSources(int index, com.google.cloud.securitycenter.v1.Source value) { if (sourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSourcesIsMutable(); sources_.set(index, value); onChanged(); } else { sourcesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public Builder setSources( int index, com.google.cloud.securitycenter.v1.Source.Builder builderForValue) { if (sourcesBuilder_ == null) { ensureSourcesIsMutable(); sources_.set(index, builderForValue.build()); onChanged(); } else { sourcesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Sources belonging to the requested parent. 
* </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public Builder addSources(com.google.cloud.securitycenter.v1.Source value) { if (sourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSourcesIsMutable(); sources_.add(value); onChanged(); } else { sourcesBuilder_.addMessage(value); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public Builder addSources(int index, com.google.cloud.securitycenter.v1.Source value) { if (sourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSourcesIsMutable(); sources_.add(index, value); onChanged(); } else { sourcesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public Builder addSources(com.google.cloud.securitycenter.v1.Source.Builder builderForValue) { if (sourcesBuilder_ == null) { ensureSourcesIsMutable(); sources_.add(builderForValue.build()); onChanged(); } else { sourcesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public Builder addSources( int index, com.google.cloud.securitycenter.v1.Source.Builder builderForValue) { if (sourcesBuilder_ == null) { ensureSourcesIsMutable(); sources_.add(index, builderForValue.build()); onChanged(); } else { sourcesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public Builder addAllSources( java.lang.Iterable<? 
extends com.google.cloud.securitycenter.v1.Source> values) { if (sourcesBuilder_ == null) { ensureSourcesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, sources_); onChanged(); } else { sourcesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public Builder clearSources() { if (sourcesBuilder_ == null) { sources_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { sourcesBuilder_.clear(); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public Builder removeSources(int index) { if (sourcesBuilder_ == null) { ensureSourcesIsMutable(); sources_.remove(index); onChanged(); } else { sourcesBuilder_.remove(index); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public com.google.cloud.securitycenter.v1.Source.Builder getSourcesBuilder(int index) { return getSourcesFieldBuilder().getBuilder(index); } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public com.google.cloud.securitycenter.v1.SourceOrBuilder getSourcesOrBuilder(int index) { if (sourcesBuilder_ == null) { return sources_.get(index); } else { return sourcesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public java.util.List<? 
extends com.google.cloud.securitycenter.v1.SourceOrBuilder> getSourcesOrBuilderList() { if (sourcesBuilder_ != null) { return sourcesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(sources_); } } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public com.google.cloud.securitycenter.v1.Source.Builder addSourcesBuilder() { return getSourcesFieldBuilder() .addBuilder(com.google.cloud.securitycenter.v1.Source.getDefaultInstance()); } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public com.google.cloud.securitycenter.v1.Source.Builder addSourcesBuilder(int index) { return getSourcesFieldBuilder() .addBuilder(index, com.google.cloud.securitycenter.v1.Source.getDefaultInstance()); } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v1.Source sources = 1;</code> */ public java.util.List<com.google.cloud.securitycenter.v1.Source.Builder> getSourcesBuilderList() { return getSourcesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securitycenter.v1.Source, com.google.cloud.securitycenter.v1.Source.Builder, com.google.cloud.securitycenter.v1.SourceOrBuilder> getSourcesFieldBuilder() { if (sourcesBuilder_ == null) { sourcesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securitycenter.v1.Source, com.google.cloud.securitycenter.v1.Source.Builder, com.google.cloud.securitycenter.v1.SourceOrBuilder>( sources_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); sources_ = null; } return sourcesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. 
* </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. 
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v1.ListSourcesResponse) } // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v1.ListSourcesResponse) private static final com.google.cloud.securitycenter.v1.ListSourcesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v1.ListSourcesResponse(); } public static com.google.cloud.securitycenter.v1.ListSourcesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListSourcesResponse> PARSER = new com.google.protobuf.AbstractParser<ListSourcesResponse>() { @java.lang.Override public ListSourcesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<ListSourcesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListSourcesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securitycenter.v1.ListSourcesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/sdk-platform-java
36,115
java-iam/grpc-google-iam-v2beta/src/main/java/com/google/iam/v2beta/PoliciesGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.iam.v2beta; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * An interface for managing Identity and Access Management (IAM) policies. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/iam/v2beta/policy.proto") @io.grpc.stub.annotations.GrpcGenerated public final class PoliciesGrpc { private PoliciesGrpc() {} public static final java.lang.String SERVICE_NAME = "google.iam.v2beta.Policies"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.iam.v2beta.ListPoliciesRequest, com.google.iam.v2beta.ListPoliciesResponse> getListPoliciesMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "ListPolicies", requestType = com.google.iam.v2beta.ListPoliciesRequest.class, responseType = com.google.iam.v2beta.ListPoliciesResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.iam.v2beta.ListPoliciesRequest, com.google.iam.v2beta.ListPoliciesResponse> getListPoliciesMethod() { io.grpc.MethodDescriptor< com.google.iam.v2beta.ListPoliciesRequest, com.google.iam.v2beta.ListPoliciesResponse> getListPoliciesMethod; if ((getListPoliciesMethod = PoliciesGrpc.getListPoliciesMethod) == null) { synchronized (PoliciesGrpc.class) { if ((getListPoliciesMethod = PoliciesGrpc.getListPoliciesMethod) == null) { PoliciesGrpc.getListPoliciesMethod = getListPoliciesMethod = io.grpc.MethodDescriptor .<com.google.iam.v2beta.ListPoliciesRequest, com.google.iam.v2beta.ListPoliciesResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "ListPolicies")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.iam.v2beta.ListPoliciesRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.iam.v2beta.ListPoliciesResponse.getDefaultInstance())) .setSchemaDescriptor(new PoliciesMethodDescriptorSupplier("ListPolicies")) .build(); } } } return getListPoliciesMethod; } private static volatile io.grpc.MethodDescriptor< com.google.iam.v2beta.GetPolicyRequest, com.google.iam.v2beta.Policy> getGetPolicyMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetPolicy", requestType = com.google.iam.v2beta.GetPolicyRequest.class, responseType = com.google.iam.v2beta.Policy.class, methodType = 
io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.iam.v2beta.GetPolicyRequest, com.google.iam.v2beta.Policy> getGetPolicyMethod() { io.grpc.MethodDescriptor<com.google.iam.v2beta.GetPolicyRequest, com.google.iam.v2beta.Policy> getGetPolicyMethod; if ((getGetPolicyMethod = PoliciesGrpc.getGetPolicyMethod) == null) { synchronized (PoliciesGrpc.class) { if ((getGetPolicyMethod = PoliciesGrpc.getGetPolicyMethod) == null) { PoliciesGrpc.getGetPolicyMethod = getGetPolicyMethod = io.grpc.MethodDescriptor .<com.google.iam.v2beta.GetPolicyRequest, com.google.iam.v2beta.Policy> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GetPolicy")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.iam.v2beta.GetPolicyRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.iam.v2beta.Policy.getDefaultInstance())) .setSchemaDescriptor(new PoliciesMethodDescriptorSupplier("GetPolicy")) .build(); } } } return getGetPolicyMethod; } private static volatile io.grpc.MethodDescriptor< com.google.iam.v2beta.CreatePolicyRequest, com.google.longrunning.Operation> getCreatePolicyMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "CreatePolicy", requestType = com.google.iam.v2beta.CreatePolicyRequest.class, responseType = com.google.longrunning.Operation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.iam.v2beta.CreatePolicyRequest, com.google.longrunning.Operation> getCreatePolicyMethod() { io.grpc.MethodDescriptor< com.google.iam.v2beta.CreatePolicyRequest, com.google.longrunning.Operation> getCreatePolicyMethod; if ((getCreatePolicyMethod = PoliciesGrpc.getCreatePolicyMethod) == null) { synchronized (PoliciesGrpc.class) { if ((getCreatePolicyMethod = 
PoliciesGrpc.getCreatePolicyMethod) == null) { PoliciesGrpc.getCreatePolicyMethod = getCreatePolicyMethod = io.grpc.MethodDescriptor .<com.google.iam.v2beta.CreatePolicyRequest, com.google.longrunning.Operation> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "CreatePolicy")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.iam.v2beta.CreatePolicyRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor(new PoliciesMethodDescriptorSupplier("CreatePolicy")) .build(); } } } return getCreatePolicyMethod; } private static volatile io.grpc.MethodDescriptor< com.google.iam.v2beta.UpdatePolicyRequest, com.google.longrunning.Operation> getUpdatePolicyMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "UpdatePolicy", requestType = com.google.iam.v2beta.UpdatePolicyRequest.class, responseType = com.google.longrunning.Operation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.iam.v2beta.UpdatePolicyRequest, com.google.longrunning.Operation> getUpdatePolicyMethod() { io.grpc.MethodDescriptor< com.google.iam.v2beta.UpdatePolicyRequest, com.google.longrunning.Operation> getUpdatePolicyMethod; if ((getUpdatePolicyMethod = PoliciesGrpc.getUpdatePolicyMethod) == null) { synchronized (PoliciesGrpc.class) { if ((getUpdatePolicyMethod = PoliciesGrpc.getUpdatePolicyMethod) == null) { PoliciesGrpc.getUpdatePolicyMethod = getUpdatePolicyMethod = io.grpc.MethodDescriptor .<com.google.iam.v2beta.UpdatePolicyRequest, com.google.longrunning.Operation> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "UpdatePolicy")) .setSampledToLocalTracing(true) .setRequestMarshaller( 
io.grpc.protobuf.ProtoUtils.marshaller( com.google.iam.v2beta.UpdatePolicyRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor(new PoliciesMethodDescriptorSupplier("UpdatePolicy")) .build(); } } } return getUpdatePolicyMethod; } private static volatile io.grpc.MethodDescriptor< com.google.iam.v2beta.DeletePolicyRequest, com.google.longrunning.Operation> getDeletePolicyMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DeletePolicy", requestType = com.google.iam.v2beta.DeletePolicyRequest.class, responseType = com.google.longrunning.Operation.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.iam.v2beta.DeletePolicyRequest, com.google.longrunning.Operation> getDeletePolicyMethod() { io.grpc.MethodDescriptor< com.google.iam.v2beta.DeletePolicyRequest, com.google.longrunning.Operation> getDeletePolicyMethod; if ((getDeletePolicyMethod = PoliciesGrpc.getDeletePolicyMethod) == null) { synchronized (PoliciesGrpc.class) { if ((getDeletePolicyMethod = PoliciesGrpc.getDeletePolicyMethod) == null) { PoliciesGrpc.getDeletePolicyMethod = getDeletePolicyMethod = io.grpc.MethodDescriptor .<com.google.iam.v2beta.DeletePolicyRequest, com.google.longrunning.Operation> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "DeletePolicy")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.iam.v2beta.DeletePolicyRequest.getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.longrunning.Operation.getDefaultInstance())) .setSchemaDescriptor(new PoliciesMethodDescriptorSupplier("DeletePolicy")) .build(); } } } return getDeletePolicyMethod; } /** Creates a new async stub that supports all call types for the service 
*/ public static PoliciesStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<PoliciesStub> factory = new io.grpc.stub.AbstractStub.StubFactory<PoliciesStub>() { @java.lang.Override public PoliciesStub newStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PoliciesStub(channel, callOptions); } }; return PoliciesStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports all types of calls on the service */ public static PoliciesBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<PoliciesBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<PoliciesBlockingV2Stub>() { @java.lang.Override public PoliciesBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PoliciesBlockingV2Stub(channel, callOptions); } }; return PoliciesBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static PoliciesBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<PoliciesBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<PoliciesBlockingStub>() { @java.lang.Override public PoliciesBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PoliciesBlockingStub(channel, callOptions); } }; return PoliciesBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static PoliciesFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<PoliciesFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<PoliciesFutureStub>() { @java.lang.Override public PoliciesFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PoliciesFutureStub(channel, callOptions); } }; return 
PoliciesFutureStub.newStub(factory, channel); } /** * * * <pre> * An interface for managing Identity and Access Management (IAM) policies. * </pre> */ public interface AsyncService { /** * * * <pre> * Retrieves the policies of the specified kind that are attached to a * resource. * The response lists only policy metadata. In particular, policy rules are * omitted. * </pre> */ default void listPolicies( com.google.iam.v2beta.ListPoliciesRequest request, io.grpc.stub.StreamObserver<com.google.iam.v2beta.ListPoliciesResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getListPoliciesMethod(), responseObserver); } /** * * * <pre> * Gets a policy. * </pre> */ default void getPolicy( com.google.iam.v2beta.GetPolicyRequest request, io.grpc.stub.StreamObserver<com.google.iam.v2beta.Policy> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getGetPolicyMethod(), responseObserver); } /** * * * <pre> * Creates a policy. * </pre> */ default void createPolicy( com.google.iam.v2beta.CreatePolicyRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getCreatePolicyMethod(), responseObserver); } /** * * * <pre> * Updates the specified policy. * You can update only the rules and the display name for the policy. * To update a policy, you should use a read-modify-write loop: * 1. Use [GetPolicy][google.iam.v2beta.Policies.GetPolicy] to read the current version of the policy. * 2. Modify the policy as needed. * 3. Use `UpdatePolicy` to write the updated policy. * This pattern helps prevent conflicts between concurrent updates. * </pre> */ default void updatePolicy( com.google.iam.v2beta.UpdatePolicyRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getUpdatePolicyMethod(), responseObserver); } /** * * * <pre> * Deletes a policy. 
This action is permanent. * </pre> */ default void deletePolicy( com.google.iam.v2beta.DeletePolicyRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getDeletePolicyMethod(), responseObserver); } } /** * Base class for the server implementation of the service Policies. * * <pre> * An interface for managing Identity and Access Management (IAM) policies. * </pre> */ public abstract static class PoliciesImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return PoliciesGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service Policies. * * <pre> * An interface for managing Identity and Access Management (IAM) policies. * </pre> */ public static final class PoliciesStub extends io.grpc.stub.AbstractAsyncStub<PoliciesStub> { private PoliciesStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected PoliciesStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PoliciesStub(channel, callOptions); } /** * * * <pre> * Retrieves the policies of the specified kind that are attached to a * resource. * The response lists only policy metadata. In particular, policy rules are * omitted. * </pre> */ public void listPolicies( com.google.iam.v2beta.ListPoliciesRequest request, io.grpc.stub.StreamObserver<com.google.iam.v2beta.ListPoliciesResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getListPoliciesMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Gets a policy. 
* </pre> */ public void getPolicy( com.google.iam.v2beta.GetPolicyRequest request, io.grpc.stub.StreamObserver<com.google.iam.v2beta.Policy> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGetPolicyMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Creates a policy. * </pre> */ public void createPolicy( com.google.iam.v2beta.CreatePolicyRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getCreatePolicyMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Updates the specified policy. * You can update only the rules and the display name for the policy. * To update a policy, you should use a read-modify-write loop: * 1. Use [GetPolicy][google.iam.v2beta.Policies.GetPolicy] to read the current version of the policy. * 2. Modify the policy as needed. * 3. Use `UpdatePolicy` to write the updated policy. * This pattern helps prevent conflicts between concurrent updates. * </pre> */ public void updatePolicy( com.google.iam.v2beta.UpdatePolicyRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getUpdatePolicyMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Deletes a policy. This action is permanent. * </pre> */ public void deletePolicy( com.google.iam.v2beta.DeletePolicyRequest request, io.grpc.stub.StreamObserver<com.google.longrunning.Operation> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getDeletePolicyMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service Policies. * * <pre> * An interface for managing Identity and Access Management (IAM) policies. 
* </pre> */ public static final class PoliciesBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<PoliciesBlockingV2Stub> { private PoliciesBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected PoliciesBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PoliciesBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Retrieves the policies of the specified kind that are attached to a * resource. * The response lists only policy metadata. In particular, policy rules are * omitted. * </pre> */ public com.google.iam.v2beta.ListPoliciesResponse listPolicies( com.google.iam.v2beta.ListPoliciesRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListPoliciesMethod(), getCallOptions(), request); } /** * * * <pre> * Gets a policy. * </pre> */ public com.google.iam.v2beta.Policy getPolicy(com.google.iam.v2beta.GetPolicyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetPolicyMethod(), getCallOptions(), request); } /** * * * <pre> * Creates a policy. * </pre> */ public com.google.longrunning.Operation createPolicy( com.google.iam.v2beta.CreatePolicyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreatePolicyMethod(), getCallOptions(), request); } /** * * * <pre> * Updates the specified policy. * You can update only the rules and the display name for the policy. * To update a policy, you should use a read-modify-write loop: * 1. Use [GetPolicy][google.iam.v2beta.Policies.GetPolicy] to read the current version of the policy. * 2. Modify the policy as needed. * 3. Use `UpdatePolicy` to write the updated policy. * This pattern helps prevent conflicts between concurrent updates. 
* </pre> */ public com.google.longrunning.Operation updatePolicy( com.google.iam.v2beta.UpdatePolicyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdatePolicyMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes a policy. This action is permanent. * </pre> */ public com.google.longrunning.Operation deletePolicy( com.google.iam.v2beta.DeletePolicyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeletePolicyMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service Policies. * * <pre> * An interface for managing Identity and Access Management (IAM) policies. * </pre> */ public static final class PoliciesBlockingStub extends io.grpc.stub.AbstractBlockingStub<PoliciesBlockingStub> { private PoliciesBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected PoliciesBlockingStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PoliciesBlockingStub(channel, callOptions); } /** * * * <pre> * Retrieves the policies of the specified kind that are attached to a * resource. * The response lists only policy metadata. In particular, policy rules are * omitted. * </pre> */ public com.google.iam.v2beta.ListPoliciesResponse listPolicies( com.google.iam.v2beta.ListPoliciesRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getListPoliciesMethod(), getCallOptions(), request); } /** * * * <pre> * Gets a policy. * </pre> */ public com.google.iam.v2beta.Policy getPolicy(com.google.iam.v2beta.GetPolicyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGetPolicyMethod(), getCallOptions(), request); } /** * * * <pre> * Creates a policy. 
* </pre> */ public com.google.longrunning.Operation createPolicy( com.google.iam.v2beta.CreatePolicyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getCreatePolicyMethod(), getCallOptions(), request); } /** * * * <pre> * Updates the specified policy. * You can update only the rules and the display name for the policy. * To update a policy, you should use a read-modify-write loop: * 1. Use [GetPolicy][google.iam.v2beta.Policies.GetPolicy] to read the current version of the policy. * 2. Modify the policy as needed. * 3. Use `UpdatePolicy` to write the updated policy. * This pattern helps prevent conflicts between concurrent updates. * </pre> */ public com.google.longrunning.Operation updatePolicy( com.google.iam.v2beta.UpdatePolicyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getUpdatePolicyMethod(), getCallOptions(), request); } /** * * * <pre> * Deletes a policy. This action is permanent. * </pre> */ public com.google.longrunning.Operation deletePolicy( com.google.iam.v2beta.DeletePolicyRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getDeletePolicyMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service Policies. * * <pre> * An interface for managing Identity and Access Management (IAM) policies. * </pre> */ public static final class PoliciesFutureStub extends io.grpc.stub.AbstractFutureStub<PoliciesFutureStub> { private PoliciesFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected PoliciesFutureStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new PoliciesFutureStub(channel, callOptions); } /** * * * <pre> * Retrieves the policies of the specified kind that are attached to a * resource. * The response lists only policy metadata. In particular, policy rules are * omitted. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.iam.v2beta.ListPoliciesResponse> listPolicies(com.google.iam.v2beta.ListPoliciesRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getListPoliciesMethod(), getCallOptions()), request); } /** * * * <pre> * Gets a policy. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.iam.v2beta.Policy> getPolicy(com.google.iam.v2beta.GetPolicyRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGetPolicyMethod(), getCallOptions()), request); } /** * * * <pre> * Creates a policy. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> createPolicy(com.google.iam.v2beta.CreatePolicyRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getCreatePolicyMethod(), getCallOptions()), request); } /** * * * <pre> * Updates the specified policy. * You can update only the rules and the display name for the policy. * To update a policy, you should use a read-modify-write loop: * 1. Use [GetPolicy][google.iam.v2beta.Policies.GetPolicy] to read the current version of the policy. * 2. Modify the policy as needed. * 3. Use `UpdatePolicy` to write the updated policy. * This pattern helps prevent conflicts between concurrent updates. * </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> updatePolicy(com.google.iam.v2beta.UpdatePolicyRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getUpdatePolicyMethod(), getCallOptions()), request); } /** * * * <pre> * Deletes a policy. This action is permanent. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture<com.google.longrunning.Operation> deletePolicy(com.google.iam.v2beta.DeletePolicyRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getDeletePolicyMethod(), getCallOptions()), request); } } private static final int METHODID_LIST_POLICIES = 0; private static final int METHODID_GET_POLICY = 1; private static final int METHODID_CREATE_POLICY = 2; private static final int METHODID_UPDATE_POLICY = 3; private static final int METHODID_DELETE_POLICY = 4; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_LIST_POLICIES: serviceImpl.listPolicies( (com.google.iam.v2beta.ListPoliciesRequest) request, (io.grpc.stub.StreamObserver<com.google.iam.v2beta.ListPoliciesResponse>) responseObserver); break; case METHODID_GET_POLICY: serviceImpl.getPolicy( (com.google.iam.v2beta.GetPolicyRequest) request, (io.grpc.stub.StreamObserver<com.google.iam.v2beta.Policy>) responseObserver); break; case METHODID_CREATE_POLICY: serviceImpl.createPolicy( (com.google.iam.v2beta.CreatePolicyRequest) request, (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver); break; case METHODID_UPDATE_POLICY: serviceImpl.updatePolicy( (com.google.iam.v2beta.UpdatePolicyRequest) request, (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver); break; case 
METHODID_DELETE_POLICY: serviceImpl.deletePolicy( (com.google.iam.v2beta.DeletePolicyRequest) request, (io.grpc.stub.StreamObserver<com.google.longrunning.Operation>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getListPoliciesMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.iam.v2beta.ListPoliciesRequest, com.google.iam.v2beta.ListPoliciesResponse>(service, METHODID_LIST_POLICIES))) .addMethod( getGetPolicyMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.iam.v2beta.GetPolicyRequest, com.google.iam.v2beta.Policy>( service, METHODID_GET_POLICY))) .addMethod( getCreatePolicyMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.iam.v2beta.CreatePolicyRequest, com.google.longrunning.Operation>( service, METHODID_CREATE_POLICY))) .addMethod( getUpdatePolicyMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.iam.v2beta.UpdatePolicyRequest, com.google.longrunning.Operation>( service, METHODID_UPDATE_POLICY))) .addMethod( getDeletePolicyMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.iam.v2beta.DeletePolicyRequest, com.google.longrunning.Operation>( service, METHODID_DELETE_POLICY))) .build(); } private abstract static class PoliciesBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { PoliciesBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return 
com.google.iam.v2beta.PolicyProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("Policies"); } } private static final class PoliciesFileDescriptorSupplier extends PoliciesBaseDescriptorSupplier { PoliciesFileDescriptorSupplier() {} } private static final class PoliciesMethodDescriptorSupplier extends PoliciesBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; PoliciesMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (PoliciesGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new PoliciesFileDescriptorSupplier()) .addMethod(getListPoliciesMethod()) .addMethod(getGetPolicyMethod()) .addMethod(getCreatePolicyMethod()) .addMethod(getUpdatePolicyMethod()) .addMethod(getDeletePolicyMethod()) .build(); } } } return result; } }
googleapis/google-cloud-java
35,874
java-aiplatform/proto-google-cloud-aiplatform-v1/src/main/java/com/google/cloud/aiplatform/v1/ListTuningJobsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/aiplatform/v1/genai_tuning_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.aiplatform.v1; /** * * * <pre> * Request message for * [GenAiTuningService.ListTuningJobs][google.cloud.aiplatform.v1.GenAiTuningService.ListTuningJobs]. * </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ListTuningJobsRequest} */ public final class ListTuningJobsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.aiplatform.v1.ListTuningJobsRequest) ListTuningJobsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListTuningJobsRequest.newBuilder() to construct. 
private ListTuningJobsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListTuningJobsRequest() { parent_ = ""; filter_ = ""; pageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListTuningJobsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.GenAiTuningServiceProto .internal_static_google_cloud_aiplatform_v1_ListTuningJobsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.GenAiTuningServiceProto .internal_static_google_cloud_aiplatform_v1_ListTuningJobsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ListTuningJobsRequest.class, com.google.cloud.aiplatform.v1.ListTuningJobsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the Location to list the TuningJobs from. * Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The resource name of the Location to list the TuningJobs from. 
* Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. The standard list filter. * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. The standard list filter. * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 3; private int pageSize_ = 0; /** * * * <pre> * Optional. The standard list page size. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. 
*/ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. The standard list page token. * Typically obtained via * [ListTuningJobsResponse.next_page_token][google.cloud.aiplatform.v1.ListTuningJobsResponse.next_page_token] * of the previous GenAiTuningService.ListTuningJob][] call. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. The standard list page token. * Typically obtained via * [ListTuningJobsResponse.next_page_token][google.cloud.aiplatform.v1.ListTuningJobsResponse.next_page_token] * of the previous GenAiTuningService.ListTuningJob][] call. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, filter_); } if (pageSize_ != 0) { output.writeInt32(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, pageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, filter_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(3, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, pageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; 
return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.aiplatform.v1.ListTuningJobsRequest)) { return super.equals(obj); } com.google.cloud.aiplatform.v1.ListTuningJobsRequest other = (com.google.cloud.aiplatform.v1.ListTuningJobsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getFilter().equals(other.getFilter())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.aiplatform.v1.ListTuningJobsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListTuningJobsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListTuningJobsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.aiplatform.v1.ListTuningJobsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListTuningJobsRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.aiplatform.v1.ListTuningJobsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListTuningJobsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListTuningJobsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListTuningJobsRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListTuningJobsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.aiplatform.v1.ListTuningJobsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.aiplatform.v1.ListTuningJobsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.aiplatform.v1.ListTuningJobsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [GenAiTuningService.ListTuningJobs][google.cloud.aiplatform.v1.GenAiTuningService.ListTuningJobs]. 
* </pre> * * Protobuf type {@code google.cloud.aiplatform.v1.ListTuningJobsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.aiplatform.v1.ListTuningJobsRequest) com.google.cloud.aiplatform.v1.ListTuningJobsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.aiplatform.v1.GenAiTuningServiceProto .internal_static_google_cloud_aiplatform_v1_ListTuningJobsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.aiplatform.v1.GenAiTuningServiceProto .internal_static_google_cloud_aiplatform_v1_ListTuningJobsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.aiplatform.v1.ListTuningJobsRequest.class, com.google.cloud.aiplatform.v1.ListTuningJobsRequest.Builder.class); } // Construct using com.google.cloud.aiplatform.v1.ListTuningJobsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; filter_ = ""; pageSize_ = 0; pageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.aiplatform.v1.GenAiTuningServiceProto .internal_static_google_cloud_aiplatform_v1_ListTuningJobsRequest_descriptor; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListTuningJobsRequest getDefaultInstanceForType() { return com.google.cloud.aiplatform.v1.ListTuningJobsRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.aiplatform.v1.ListTuningJobsRequest build() { com.google.cloud.aiplatform.v1.ListTuningJobsRequest result = buildPartial(); if (!result.isInitialized()) { 
throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListTuningJobsRequest buildPartial() { com.google.cloud.aiplatform.v1.ListTuningJobsRequest result = new com.google.cloud.aiplatform.v1.ListTuningJobsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.aiplatform.v1.ListTuningJobsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.filter_ = filter_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.pageToken_ = pageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.aiplatform.v1.ListTuningJobsRequest) { return mergeFrom((com.google.cloud.aiplatform.v1.ListTuningJobsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder 
mergeFrom(com.google.cloud.aiplatform.v1.ListTuningJobsRequest other) { if (other == com.google.cloud.aiplatform.v1.ListTuningJobsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000002; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 24: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000004; break; } // case 24 case 34: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The resource name of the Location to list the TuningJobs from. 
* Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The resource name of the Location to list the TuningJobs from. * Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The resource name of the Location to list the TuningJobs from. * Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The resource name of the Location to list the TuningJobs from. * Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. 
*/ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The resource name of the Location to list the TuningJobs from. * Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. The standard list filter. * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The standard list filter. * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The standard list filter. * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. 
*/ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The standard list filter. * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Optional. The standard list filter. * </pre> * * <code>string filter = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. The standard list page size. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. The standard list page size. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. The standard list page size. * </pre> * * <code>int32 page_size = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000004); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. The standard list page token. 
* Typically obtained via * [ListTuningJobsResponse.next_page_token][google.cloud.aiplatform.v1.ListTuningJobsResponse.next_page_token] * of the previous GenAiTuningService.ListTuningJob][] call. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The standard list page token. * Typically obtained via * [ListTuningJobsResponse.next_page_token][google.cloud.aiplatform.v1.ListTuningJobsResponse.next_page_token] * of the previous GenAiTuningService.ListTuningJob][] call. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The standard list page token. * Typically obtained via * [ListTuningJobsResponse.next_page_token][google.cloud.aiplatform.v1.ListTuningJobsResponse.next_page_token] * of the previous GenAiTuningService.ListTuningJob][] call. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. The standard list page token. 
* Typically obtained via * [ListTuningJobsResponse.next_page_token][google.cloud.aiplatform.v1.ListTuningJobsResponse.next_page_token] * of the previous GenAiTuningService.ListTuningJob][] call. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. The standard list page token. * Typically obtained via * [ListTuningJobsResponse.next_page_token][google.cloud.aiplatform.v1.ListTuningJobsResponse.next_page_token] * of the previous GenAiTuningService.ListTuningJob][] call. * </pre> * * <code>string page_token = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.aiplatform.v1.ListTuningJobsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.aiplatform.v1.ListTuningJobsRequest) private static final com.google.cloud.aiplatform.v1.ListTuningJobsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.aiplatform.v1.ListTuningJobsRequest(); } public static com.google.cloud.aiplatform.v1.ListTuningJobsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final 
com.google.protobuf.Parser<ListTuningJobsRequest> PARSER = new com.google.protobuf.AbstractParser<ListTuningJobsRequest>() { @java.lang.Override public ListTuningJobsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListTuningJobsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListTuningJobsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.aiplatform.v1.ListTuningJobsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/kafka
36,005
coordinator-common/src/test/java/org/apache/kafka/coordinator/common/runtime/CoordinatorLoaderImplTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.kafka.coordinator.common.runtime; import org.apache.kafka.common.TopicPartition; import org.apache.kafka.common.compress.Compression; import org.apache.kafka.common.errors.NotLeaderOrFollowerException; import org.apache.kafka.common.record.ControlRecordType; import org.apache.kafka.common.record.EndTransactionMarker; import org.apache.kafka.common.record.FileRecords; import org.apache.kafka.common.record.MemoryRecords; import org.apache.kafka.common.record.RecordBatch; import org.apache.kafka.common.record.SimpleRecord; import org.apache.kafka.common.requests.TransactionResult; import org.apache.kafka.common.utils.MockTime; import org.apache.kafka.common.utils.Time; import org.apache.kafka.server.storage.log.FetchIsolation; import org.apache.kafka.storage.internals.log.FetchDataInfo; import org.apache.kafka.storage.internals.log.LogOffsetMetadata; import org.apache.kafka.storage.internals.log.UnifiedLog; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.Timeout; import org.mockito.ArgumentMatchers; import org.mockito.invocation.InvocationOnMock; import java.io.IOException; import java.nio.ByteBuffer; import java.nio.charset.StandardCharsets; import java.util.Arrays; 
import java.util.List; import java.util.Map; import java.util.Optional; import java.util.concurrent.CompletableFuture; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.function.Function; import static org.apache.kafka.test.TestUtils.assertFutureThrows; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertNotNull; import static org.junit.jupiter.api.Assertions.assertTrue; import static org.mockito.ArgumentMatchers.any; import static org.mockito.ArgumentMatchers.anyLong; import static org.mockito.ArgumentMatchers.eq; import static org.mockito.Mockito.doAnswer; import static org.mockito.Mockito.mock; import static org.mockito.Mockito.times; import static org.mockito.Mockito.verify; import static org.mockito.Mockito.when; @SuppressWarnings({"unchecked", "resource"}) @Timeout(60) class CoordinatorLoaderImplTest { private static class StringKeyValueDeserializer implements Deserializer<Map.Entry<String, String>> { @Override public Map.Entry<String, String> deserialize(ByteBuffer key, ByteBuffer value) throws RuntimeException { return Map.entry( StandardCharsets.UTF_8.decode(key).toString(), StandardCharsets.UTF_8.decode(value).toString() ); } } @Test void testNonexistentPartition() throws Exception { TopicPartition tp = new TopicPartition("foo", 0); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> Optional.empty(); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = partition -> Optional.empty(); Deserializer<Map.Entry<String, String>> serde = mock(Deserializer.class); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( Time.SYSTEM, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, CoordinatorLoaderImpl.DEFAULT_COMMIT_INTERVAL_OFFSETS )) { 
assertFutureThrows(NotLeaderOrFollowerException.class, loader.load(tp, coordinator)); } } @Test void testLoadingIsRejectedWhenClosed() throws Exception { TopicPartition tp = new TopicPartition("foo", 0); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> Optional.of(mock(UnifiedLog.class)); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = partition -> Optional.empty(); Deserializer<Map.Entry<String, String>> serde = mock(Deserializer.class); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( Time.SYSTEM, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, CoordinatorLoaderImpl.DEFAULT_COMMIT_INTERVAL_OFFSETS )) { loader.close(); assertFutureThrows(RuntimeException.class, loader.load(tp, coordinator)); } } @Test void testLoading() throws Exception { TopicPartition tp = new TopicPartition("foo", 0); UnifiedLog log = mock(UnifiedLog.class); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> Optional.of(log); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = partition -> Optional.of(9L); Deserializer<Map.Entry<String, String>> serde = new StringKeyValueDeserializer(); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( Time.SYSTEM, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, CoordinatorLoaderImpl.DEFAULT_COMMIT_INTERVAL_OFFSETS )) { when(log.logStartOffset()).thenReturn(0L); when(log.highWatermark()).thenReturn(0L); FetchDataInfo readResult1 = logReadResult(0, Arrays.asList( new SimpleRecord("k1".getBytes(), "v1".getBytes()), new SimpleRecord("k2".getBytes(), "v2".getBytes()) )); when(log.read(0L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult1); 
FetchDataInfo readResult2 = logReadResult(2, Arrays.asList( new SimpleRecord("k3".getBytes(), "v3".getBytes()), new SimpleRecord("k4".getBytes(), "v4".getBytes()), new SimpleRecord("k5".getBytes(), "v5".getBytes()) )); when(log.read(2L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult2); FetchDataInfo readResult3 = logReadResult(5, 100L, (short) 5, Arrays.asList( new SimpleRecord("k6".getBytes(), "v6".getBytes()), new SimpleRecord("k7".getBytes(), "v7".getBytes()) )); when(log.read(5L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult3); FetchDataInfo readResult4 = logReadResult( 7, 100L, (short) 5, ControlRecordType.COMMIT ); when(log.read(7L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult4); FetchDataInfo readResult5 = logReadResult( 8, 500L, (short) 10, ControlRecordType.ABORT ); when(log.read(8L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult5); CoordinatorLoader.LoadSummary summary = loader.load(tp, coordinator).get(10, TimeUnit.SECONDS); assertNotNull(summary); // Includes 7 normal + 2 control (COMMIT, ABORT) assertEquals(9, summary.numRecords()); verify(coordinator).replay(0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k1", "v1")); verify(coordinator).replay(1L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k2", "v2")); verify(coordinator).replay(2L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k3", "v3")); verify(coordinator).replay(3L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k4", "v4")); verify(coordinator).replay(4L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k5", "v5")); verify(coordinator).replay(5L, 100L, (short) 5, Map.entry("k6", "v6")); verify(coordinator).replay(6L, 100L, (short) 5, Map.entry("k7", "v7")); verify(coordinator).replayEndTransactionMarker(100L, (short) 5, TransactionResult.COMMIT); verify(coordinator).replayEndTransactionMarker(500L, (short) 10, 
TransactionResult.ABORT); verify(coordinator).updateLastWrittenOffset(2L); verify(coordinator).updateLastWrittenOffset(5L); verify(coordinator).updateLastWrittenOffset(7L); verify(coordinator).updateLastWrittenOffset(8L); verify(coordinator).updateLastCommittedOffset(0L); } } @Test void testLoadingStoppedWhenClosed() throws Exception { TopicPartition tp = new TopicPartition("foo", 0); UnifiedLog log = mock(UnifiedLog.class); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> Optional.of(log); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = partition -> Optional.of(100L); Deserializer<Map.Entry<String, String>> serde = new StringKeyValueDeserializer(); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( Time.SYSTEM, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, CoordinatorLoaderImpl.DEFAULT_COMMIT_INTERVAL_OFFSETS )) { when(log.logStartOffset()).thenReturn(0L); FetchDataInfo readResult = logReadResult(0, Arrays.asList( new SimpleRecord("k1".getBytes(), "v1".getBytes()), new SimpleRecord("k2".getBytes(), "v2".getBytes()) )); CountDownLatch latch = new CountDownLatch(1); when(log.read( anyLong(), eq(1000), eq(FetchIsolation.LOG_END), eq(true) )).thenAnswer((InvocationOnMock invocation) -> { latch.countDown(); return readResult; }); CompletableFuture<CoordinatorLoader.LoadSummary> result = loader.load(tp, coordinator); boolean completed = latch.await(10, TimeUnit.SECONDS); assertTrue(completed, "Log read timeout: Latch did not count down in time."); loader.close(); RuntimeException ex = assertFutureThrows(RuntimeException.class, result); assertNotNull(ex); assertEquals("Coordinator loader is closed.", ex.getMessage()); } } @Test void testUnknownRecordTypeAreIgnored() throws Exception { TopicPartition tp = new TopicPartition("foo", 0); UnifiedLog log = 
mock(UnifiedLog.class); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> Optional.of(log); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = partition -> Optional.of(2L); StringKeyValueDeserializer serde = mock(StringKeyValueDeserializer.class); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( Time.SYSTEM, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, CoordinatorLoaderImpl.DEFAULT_COMMIT_INTERVAL_OFFSETS )) { when(log.logStartOffset()).thenReturn(0L); FetchDataInfo readResult = logReadResult(0, Arrays.asList( new SimpleRecord("k1".getBytes(), "v1".getBytes()), new SimpleRecord("k2".getBytes(), "v2".getBytes()) )); when(log.read(0L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult); when(serde.deserialize(any(ByteBuffer.class), any(ByteBuffer.class))) .thenThrow(new Deserializer.UnknownRecordTypeException((short) 1)) .thenReturn(Map.entry("k2", "v2")); loader.load(tp, coordinator).get(10, TimeUnit.SECONDS); verify(coordinator).replay(1L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k2", "v2")); } } @Test void testDeserializationErrorFailsTheLoading() throws Exception { TopicPartition tp = new TopicPartition("foo", 0); UnifiedLog log = mock(UnifiedLog.class); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> Optional.of(log); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = partition -> Optional.of(2L); StringKeyValueDeserializer serde = mock(StringKeyValueDeserializer.class); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( Time.SYSTEM, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, 
CoordinatorLoaderImpl.DEFAULT_COMMIT_INTERVAL_OFFSETS )) { when(log.logStartOffset()).thenReturn(0L); FetchDataInfo readResult = logReadResult(0, Arrays.asList( new SimpleRecord("k1".getBytes(), "v1".getBytes()), new SimpleRecord("k2".getBytes(), "v2".getBytes()) )); when(log.read(0L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult); when(serde.deserialize(any(ByteBuffer.class), any(ByteBuffer.class))) .thenThrow(new RuntimeException("Error!")); RuntimeException ex = assertFutureThrows(RuntimeException.class, loader.load(tp, coordinator)); assertNotNull(ex); assertEquals(String.format("Deserializing record DefaultRecord(offset=0, timestamp=-1, key=2 bytes, value=2 bytes) from %s failed.", tp), ex.getMessage()); } } @Test void testLoadGroupAndOffsetsWithCorruptedLog() throws Exception { // Simulate a case where startOffset < endOffset but log is empty. This could theoretically happen // when all the records are expired and the active segment is truncated or when the partition // is accidentally corrupted. 
TopicPartition tp = new TopicPartition("foo", 0); UnifiedLog log = mock(UnifiedLog.class); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> Optional.of(log); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = partition -> Optional.of(10L); StringKeyValueDeserializer serde = mock(StringKeyValueDeserializer.class); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( Time.SYSTEM, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, CoordinatorLoaderImpl.DEFAULT_COMMIT_INTERVAL_OFFSETS )) { when(log.logStartOffset()).thenReturn(0L); FetchDataInfo readResult = logReadResult(0, List.of()); when(log.read(0L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult); assertNotNull(loader.load(tp, coordinator).get(10, TimeUnit.SECONDS)); } } @Test void testLoadSummary() throws Exception { TopicPartition tp = new TopicPartition("foo", 0); UnifiedLog log = mock(UnifiedLog.class); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> Optional.of(log); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = partition -> Optional.of(5L); StringKeyValueDeserializer serde = new StringKeyValueDeserializer(); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); MockTime time = new MockTime(); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( time, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, CoordinatorLoaderImpl.DEFAULT_COMMIT_INTERVAL_OFFSETS )) { long startTimeMs = time.milliseconds(); when(log.logStartOffset()).thenReturn(0L); FetchDataInfo readResult1 = logReadResult(0, Arrays.asList( new SimpleRecord("k1".getBytes(), "v1".getBytes()), new SimpleRecord("k2".getBytes(), "v2".getBytes()) )); when(log.read(0L, 1000, 
FetchIsolation.LOG_END, true)) .thenAnswer((InvocationOnMock invocation) -> { time.sleep(1000); return readResult1; }); FetchDataInfo readResult2 = logReadResult(2, Arrays.asList( new SimpleRecord("k3".getBytes(), "v3".getBytes()), new SimpleRecord("k4".getBytes(), "v4".getBytes()), new SimpleRecord("k5".getBytes(), "v5".getBytes()) )); when(log.read(2L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult2); CoordinatorLoader.LoadSummary summary = loader.load(tp, coordinator).get(10, TimeUnit.SECONDS); assertEquals(startTimeMs, summary.startTimeMs()); assertEquals(startTimeMs + 1000, summary.endTimeMs()); assertEquals(5, summary.numRecords()); assertEquals(readResult1.records.sizeInBytes() + readResult2.records.sizeInBytes(), summary.numBytes()); } } @Test void testUpdateLastWrittenOffsetOnBatchLoaded() throws Exception { TopicPartition tp = new TopicPartition("foo", 0); UnifiedLog log = mock(UnifiedLog.class); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> Optional.of(log); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = partition -> Optional.of(7L); StringKeyValueDeserializer serde = new StringKeyValueDeserializer(); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( Time.SYSTEM, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, CoordinatorLoaderImpl.DEFAULT_COMMIT_INTERVAL_OFFSETS )) { when(log.logStartOffset()).thenReturn(0L); when(log.highWatermark()).thenReturn(0L, 0L, 2L); FetchDataInfo readResult1 = logReadResult(0, Arrays.asList( new SimpleRecord("k1".getBytes(), "v1".getBytes()), new SimpleRecord("k2".getBytes(), "v2".getBytes()) )); when(log.read(0L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult1); FetchDataInfo readResult2 = logReadResult(2, Arrays.asList( new SimpleRecord("k3".getBytes(), "v3".getBytes()), new 
SimpleRecord("k4".getBytes(), "v4".getBytes()), new SimpleRecord("k5".getBytes(), "v5".getBytes()) )); when(log.read(2L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult2); FetchDataInfo readResult3 = logReadResult(5, Arrays.asList( new SimpleRecord("k6".getBytes(), "v6".getBytes()), new SimpleRecord("k7".getBytes(), "v7".getBytes()) )); when(log.read(5L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult3); assertNotNull(loader.load(tp, coordinator).get(10, TimeUnit.SECONDS)); verify(coordinator).replay(0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k1", "v1")); verify(coordinator).replay(1L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k2", "v2")); verify(coordinator).replay(2L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k3", "v3")); verify(coordinator).replay(3L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k4", "v4")); verify(coordinator).replay(4L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k5", "v5")); verify(coordinator).replay(5L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k6", "v6")); verify(coordinator).replay(6L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k7", "v7")); verify(coordinator, times(0)).updateLastWrittenOffset(0L); verify(coordinator, times(1)).updateLastWrittenOffset(2L); verify(coordinator, times(1)).updateLastWrittenOffset(5L); verify(coordinator, times(1)).updateLastWrittenOffset(7L); verify(coordinator, times(1)).updateLastCommittedOffset(0L); verify(coordinator, times(1)).updateLastCommittedOffset(2L); verify(coordinator, times(0)).updateLastCommittedOffset(5L); } } @Test void testUpdateLastWrittenOffsetAndUpdateLastCommittedOffsetNoRecordsRead() throws Exception { TopicPartition tp = new TopicPartition("foo", 0); UnifiedLog log = mock(UnifiedLog.class); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> 
Optional.of(log); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = partition -> Optional.of(0L); StringKeyValueDeserializer serde = new StringKeyValueDeserializer(); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( Time.SYSTEM, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, CoordinatorLoaderImpl.DEFAULT_COMMIT_INTERVAL_OFFSETS )) { when(log.logStartOffset()).thenReturn(0L); when(log.highWatermark()).thenReturn(0L); assertNotNull(loader.load(tp, coordinator).get(10, TimeUnit.SECONDS)); verify(coordinator, times(0)).updateLastWrittenOffset(anyLong()); verify(coordinator, times(0)).updateLastCommittedOffset(anyLong()); } } @Test void testUpdateLastWrittenOffsetOnBatchLoadedWhileHighWatermarkAhead() throws Exception { TopicPartition tp = new TopicPartition("foo", 0); UnifiedLog log = mock(UnifiedLog.class); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> Optional.of(log); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = partition -> Optional.of(7L); StringKeyValueDeserializer serde = new StringKeyValueDeserializer(); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( Time.SYSTEM, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, CoordinatorLoaderImpl.DEFAULT_COMMIT_INTERVAL_OFFSETS )) { when(log.logStartOffset()).thenReturn(0L); when(log.highWatermark()).thenReturn(5L, 7L, 7L); FetchDataInfo readResult1 = logReadResult(0, Arrays.asList( new SimpleRecord("k1".getBytes(), "v1".getBytes()), new SimpleRecord("k2".getBytes(), "v2".getBytes()) )); when(log.read(0L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult1); FetchDataInfo readResult2 = logReadResult(2, Arrays.asList( new 
SimpleRecord("k3".getBytes(), "v3".getBytes()), new SimpleRecord("k4".getBytes(), "v4".getBytes()), new SimpleRecord("k5".getBytes(), "v5".getBytes()) )); when(log.read(2L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult2); FetchDataInfo readResult3 = logReadResult(5, Arrays.asList( new SimpleRecord("k6".getBytes(), "v6".getBytes()), new SimpleRecord("k7".getBytes(), "v7".getBytes()) )); when(log.read(5L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult3); assertNotNull(loader.load(tp, coordinator).get(10, TimeUnit.SECONDS)); verify(coordinator).replay(0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k1", "v1")); verify(coordinator).replay(1L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k2", "v2")); verify(coordinator).replay(2L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k3", "v3")); verify(coordinator).replay(3L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k4", "v4")); verify(coordinator).replay(4L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k5", "v5")); verify(coordinator).replay(5L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k6", "v6")); verify(coordinator).replay(6L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k7", "v7")); verify(coordinator, times(0)).updateLastWrittenOffset(0L); verify(coordinator, times(0)).updateLastWrittenOffset(2L); verify(coordinator, times(0)).updateLastWrittenOffset(5L); verify(coordinator, times(1)).updateLastWrittenOffset(7L); verify(coordinator, times(0)).updateLastCommittedOffset(0L); verify(coordinator, times(0)).updateLastCommittedOffset(2L); verify(coordinator, times(0)).updateLastCommittedOffset(5L); verify(coordinator, times(1)).updateLastCommittedOffset(7L); } } @Test void testUpdateLastWrittenOffsetCommitInterval() throws Exception { TopicPartition tp = new TopicPartition("foo", 0); UnifiedLog log = 
mock(UnifiedLog.class); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> Optional.of(log); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = partition -> Optional.of(7L); StringKeyValueDeserializer serde = new StringKeyValueDeserializer(); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( Time.SYSTEM, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, 2L )) { when(log.logStartOffset()).thenReturn(0L); when(log.highWatermark()).thenReturn(7L); FetchDataInfo readResult1 = logReadResult(0, Arrays.asList( new SimpleRecord("k1".getBytes(), "v1".getBytes()), new SimpleRecord("k2".getBytes(), "v2".getBytes()) )); when(log.read(0L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult1); FetchDataInfo readResult2 = logReadResult(2, Arrays.asList( new SimpleRecord("k3".getBytes(), "v3".getBytes()), new SimpleRecord("k4".getBytes(), "v4".getBytes()), new SimpleRecord("k5".getBytes(), "v5".getBytes()) )); when(log.read(2L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult2); FetchDataInfo readResult3 = logReadResult(5, Arrays.asList( new SimpleRecord("k6".getBytes(), "v6".getBytes()) )); when(log.read(5L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult3); FetchDataInfo readResult4 = logReadResult(6, Arrays.asList( new SimpleRecord("k7".getBytes(), "v7".getBytes()) )); when(log.read(6L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult4); assertNotNull(loader.load(tp, coordinator).get(10, TimeUnit.SECONDS)); verify(coordinator).replay(0L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k1", "v1")); verify(coordinator).replay(1L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k2", "v2")); verify(coordinator).replay(2L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k3", 
"v3")); verify(coordinator).replay(3L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k4", "v4")); verify(coordinator).replay(4L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k5", "v5")); verify(coordinator).replay(5L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k6", "v6")); verify(coordinator).replay(6L, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, Map.entry("k7", "v7")); verify(coordinator, times(0)).updateLastWrittenOffset(0L); verify(coordinator, times(1)).updateLastWrittenOffset(2L); verify(coordinator, times(1)).updateLastWrittenOffset(5L); verify(coordinator, times(0)).updateLastWrittenOffset(6L); verify(coordinator, times(1)).updateLastWrittenOffset(7L); verify(coordinator, times(0)).updateLastCommittedOffset(0L); verify(coordinator, times(1)).updateLastCommittedOffset(2L); verify(coordinator, times(1)).updateLastCommittedOffset(5L); verify(coordinator, times(0)).updateLastCommittedOffset(6L); verify(coordinator, times(1)).updateLastCommittedOffset(7L); } } @Test void testPartitionGoesOfflineDuringLoad() throws Exception { TopicPartition tp = new TopicPartition("foo", 0); UnifiedLog log = mock(UnifiedLog.class); Function<TopicPartition, Optional<UnifiedLog>> partitionLogSupplier = partition -> Optional.of(log); Function<TopicPartition, Optional<Long>> partitionLogEndOffsetSupplier = mock(Function.class); StringKeyValueDeserializer serde = new StringKeyValueDeserializer(); CoordinatorPlayback<Map.Entry<String, String>> coordinator = mock(CoordinatorPlayback.class); try (CoordinatorLoaderImpl<Map.Entry<String, String>> loader = new CoordinatorLoaderImpl<>( Time.SYSTEM, partitionLogSupplier, partitionLogEndOffsetSupplier, serde, 1000, CoordinatorLoaderImpl.DEFAULT_COMMIT_INTERVAL_OFFSETS )) { when(log.logStartOffset()).thenReturn(0L); when(log.highWatermark()).thenReturn(0L); 
when(partitionLogEndOffsetSupplier.apply(tp)).thenReturn(Optional.of(5L)).thenReturn(Optional.of(-1L)); FetchDataInfo readResult1 = logReadResult(0, Arrays.asList( new SimpleRecord("k1".getBytes(), "v1".getBytes()), new SimpleRecord("k2".getBytes(), "v2".getBytes()) )); when(log.read(0L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult1); FetchDataInfo readResult2 = logReadResult(2, Arrays.asList( new SimpleRecord("k3".getBytes(), "v3".getBytes()), new SimpleRecord("k4".getBytes(), "v4".getBytes()), new SimpleRecord("k5".getBytes(), "v5".getBytes()) )); when(log.read(2L, 1000, FetchIsolation.LOG_END, true)) .thenReturn(readResult2); assertFutureThrows(NotLeaderOrFollowerException.class, loader.load(tp, coordinator)); } } private FetchDataInfo logReadResult(long startOffset, List<SimpleRecord> records) throws IOException { return logReadResult(startOffset, RecordBatch.NO_PRODUCER_ID, RecordBatch.NO_PRODUCER_EPOCH, records); } private FetchDataInfo logReadResult( long startOffset, long producerId, short producerEpoch, List<SimpleRecord> records ) throws IOException { FileRecords fileRecords = mock(FileRecords.class); MemoryRecords memoryRecords; if (producerId == RecordBatch.NO_PRODUCER_ID) { memoryRecords = MemoryRecords.withRecords( startOffset, Compression.NONE, records.toArray(new SimpleRecord[0]) ); } else { memoryRecords = MemoryRecords.withTransactionalRecords( startOffset, Compression.NONE, producerId, producerEpoch, 0, RecordBatch.NO_PARTITION_LEADER_EPOCH, records.toArray(new SimpleRecord[0]) ); } when(fileRecords.sizeInBytes()).thenReturn(memoryRecords.sizeInBytes()); doAnswer(invocation -> { ByteBuffer buffer = invocation.getArgument(0, ByteBuffer.class); buffer.put(memoryRecords.buffer().duplicate()); buffer.flip(); return null; }).when(fileRecords).readInto(any(ByteBuffer.class), ArgumentMatchers.anyInt()); return new FetchDataInfo(new LogOffsetMetadata(startOffset), fileRecords); } private FetchDataInfo logReadResult( long startOffset, long 
producerId, short producerEpoch, ControlRecordType controlRecordType ) throws IOException { FileRecords fileRecords = mock(FileRecords.class); MemoryRecords memoryRecords = MemoryRecords.withEndTransactionMarker( startOffset, 0L, RecordBatch.NO_PARTITION_LEADER_EPOCH, producerId, producerEpoch, new EndTransactionMarker(controlRecordType, 0) ); when(fileRecords.sizeInBytes()).thenReturn(memoryRecords.sizeInBytes()); doAnswer(invocation -> { ByteBuffer buffer = invocation.getArgument(0, ByteBuffer.class); buffer.put(memoryRecords.buffer().duplicate()); buffer.flip(); return null; }).when(fileRecords).readInto(any(ByteBuffer.class), ArgumentMatchers.anyInt()); return new FetchDataInfo(new LogOffsetMetadata(startOffset), fileRecords); } }
openjdk/jdk8
36,140
jdk/src/share/classes/sun/tools/javac/SourceMember.java
/* * Copyright (c) 1994, 2004, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ package sun.tools.javac; import sun.tools.java.*; import sun.tools.tree.*; import sun.tools.asm.*; import java.util.Vector; import java.util.Enumeration; import java.util.Hashtable; import java.io.PrintStream; /** * A Source Member * * WARNING: The contents of this source file are not part of any * supported API. Code that depends on them does so at its own risk: * they are subject to change or removal without notice. 
*/ @Deprecated public class SourceMember extends MemberDefinition implements Constants { /** * The argument names (if it is a method) */ Vector args; // set to the MemberDefinition in the interface if we have this field because // it has been forced on us MemberDefinition abstractSource; /** * The status of the field */ int status; static final int PARSED = 0; static final int CHECKING = 1; static final int CHECKED = 2; static final int INLINING = 3; static final int INLINED = 4; static final int ERROR = 5; public Vector getArguments() { return args; } /** * Constructor * @param argNames a vector of IdentifierToken */ public SourceMember(long where, ClassDefinition clazz, String doc, int modifiers, Type type, Identifier name, Vector argNames, IdentifierToken exp[], Node value) { super(where, clazz, modifiers, type, name, exp, value); this.documentation = doc; this.args = argNames; // for the moment // not until type names are resolved: createArgumentFields(argNames); if (ClassDefinition.containsDeprecated(documentation)) { this.modifiers |= M_DEPRECATED; } } void createArgumentFields(Vector argNames) { // Create a list of arguments if (isMethod()) { args = new Vector(); if (isConstructor() || !(isStatic() || isInitializer())) { args.addElement(((SourceClass)clazz).getThisArgument()); } if (argNames != null) { Enumeration e = argNames.elements(); Type argTypes[] = getType().getArgumentTypes(); for (int i = 0 ; i < argTypes.length ; i++) { Object x = e.nextElement(); if (x instanceof LocalMember) { // This should not happen, but it does // in cases of vicious cyclic inheritance. args = argNames; return; } Identifier id; int mod; long where; if (x instanceof Identifier) { // allow argNames to be simple Identifiers (deprecated!) 
id = (Identifier)x; mod = 0; where = getWhere(); } else { IdentifierToken token = (IdentifierToken)x; id = token.getName(); mod = token.getModifiers(); where = token.getWhere(); } args.addElement(new LocalMember(where, clazz, mod, argTypes[i], id)); } } } } // The methods addOuterThis() and addUplevelArguments() were // both originally part of a single method called addUplevelArguments() // which took a single boolean parameter describing which of the // two behaviors it wanted. // // The original addUplevelArguments() claimed to keep the arguments in // the following order: // // (1) <this> <early outer this> <uplevel arguments...> <true arguments...> // // (By <early outer this> I am referring to the clientOuterField added // to some constructors when they are created. If an outer this is // added later, on demand, then this is mixed in with the rest of the // uplevel arguments and is added by addUplevelArguments.) // // In reality, the `args' Vector was generated in this order, but the // Type array `argTypes' was generated as: // // (2) <this> <uplevel arguments...> <early outer this> <true arguments...> // // This didn't make a difference in the common case -- that is, when // a class had an <outer.this> or <uplevel arguments...> but not both. // Both can happen in the case that a member class is declared inside // of a local class. It seems that the calling sequences, generated // in places like NewInstanceExpression.codeCommon(), use order (2), // so I have changed the code below to stick with that order. Since // the only time this happens is in classes which are insideLocal, no // one should be able to tell the difference between these orders. // (bug number 4085633) LocalMember outerThisArg = null; /** * Get outer instance link, or null if none. */ public LocalMember getOuterThisArg() { return outerThisArg; } /** * Add the outer.this argument to the list of arguments for this * constructor. This is called from resolveTypeStructure. 
Any * additional uplevel arguments get added later by addUplevelArguments(). */ void addOuterThis() { UplevelReference refs = clazz.getReferences(); // See if we have a client outer field. while (refs != null && !refs.isClientOuterField()) { refs = refs.getNext(); } // There is no outer this argument. Quit. if (refs == null) { return; } // Get the old arg types. Type oldArgTypes[] = type.getArgumentTypes(); // And make an array for the new ones with space for one more. Type argTypes[] = new Type[oldArgTypes.length + 1]; LocalMember arg = refs.getLocalArgument(); outerThisArg = arg; // args is our list of arguments. It contains a `this', so // we insert at position 1. The list of types does not have a // this, so we insert at position 0. args.insertElementAt(arg, 1); argTypes[0] = arg.getType(); // Add on the rest of the constructor arguments. for (int i = 0; i < oldArgTypes.length; i++) { argTypes[i + 1] = oldArgTypes[i]; } type = Type.tMethod(type.getReturnType(), argTypes); } /** * Prepend argument names and argument types for local variable references. * This information is never seen by the type-check phase, * but it affects code generation, which is the earliest moment * we have comprehensive information on uplevel references. * The code() methods tweaks the constructor calls, prepending * the proper values to the argument list. */ void addUplevelArguments() { UplevelReference refs = clazz.getReferences(); clazz.getReferencesFrozen(); // Count how many uplevels we have to add. int count = 0; for (UplevelReference r = refs; r != null; r = r.getNext()) { if (!r.isClientOuterField()) { count += 1; } } if (count == 0) { // None to add, quit. return; } // Get the old argument types. Type oldArgTypes[] = type.getArgumentTypes(); // Make an array with enough room for the new. Type argTypes[] = new Type[oldArgTypes.length + count]; // Add all of the late uplevel references to args and argTypes. // Note that they are `off-by-one' because of the `this'. 
int ins = 0; for (UplevelReference r = refs; r != null; r = r.getNext()) { if (!r.isClientOuterField()) { LocalMember arg = r.getLocalArgument(); args.insertElementAt(arg, 1 + ins); argTypes[ins] = arg.getType(); ins++; } } // Add the rest of the old arguments. for (int i = 0; i < oldArgTypes.length; i++) { argTypes[ins + i] = oldArgTypes[i]; } type = Type.tMethod(type.getReturnType(), argTypes); } /** * Constructor for an inner class. */ public SourceMember(ClassDefinition innerClass) { super(innerClass); } /** * Constructor. * Used only to generate an abstract copy of a method that a class * inherits from an interface */ public SourceMember(MemberDefinition f, ClassDefinition c, Environment env) { this(f.getWhere(), c, f.getDocumentation(), f.getModifiers() | M_ABSTRACT, f.getType(), f.getName(), null, f.getExceptionIds(), null); this.args = f.getArguments(); this.abstractSource = f; this.exp = f.getExceptions(env); } /** * Get exceptions */ public ClassDeclaration[] getExceptions(Environment env) { if ((!isMethod()) || (exp != null)) { return exp; } if (expIds == null) { // (should not happen) exp = new ClassDeclaration[0]; return exp; } // be sure to get the imports right: env = ((SourceClass)getClassDefinition()).setupEnv(env); exp = new ClassDeclaration[expIds.length]; for (int i = 0; i < exp.length; i++) { Identifier e = expIds[i].getName(); Identifier rexp = getClassDefinition().resolveName(env, e); exp[i] = env.getClassDeclaration(rexp); } return exp; } /** * Set array of name-resolved exceptions directly, e.g., for access methods. */ public void setExceptions(ClassDeclaration[] exp) { this.exp = exp; } /** * Resolve types in a field, after parsing. * @see ClassDefinition.resolveTypeStructure */ public boolean resolved = false; public void resolveTypeStructure(Environment env) { if (tracing) env.dtEnter("SourceMember.resolveTypeStructure: " + this); // A member should only be resolved once. 
For a constructor, it is imperative // that 'addOuterThis' be called only once, else the outer instance argument may // be inserted into the argument list multiple times. if (resolved) { if (tracing) env.dtEvent("SourceMember.resolveTypeStructure: OK " + this); // This case shouldn't be happening. It is the responsibility // of our callers to avoid attempting multiple resolutions of a member. // *** REMOVE FOR SHIPMENT? *** throw new CompilerError("multiple member type resolution"); //return; } else { if (tracing) env.dtEvent("SourceMember.resolveTypeStructure: RESOLVING " + this); resolved = true; } super.resolveTypeStructure(env); if (isInnerClass()) { ClassDefinition nc = getInnerClass(); if (nc instanceof SourceClass && !nc.isLocal()) { ((SourceClass)nc).resolveTypeStructure(env); } type = innerClass.getType(); } else { // Expand all class names in 'type', including those that are not // fully-qualified or refer to inner classes, into fully-qualified // names. Local and anonymous classes get synthesized names here, // corresponding to the class files that will be generated. This is // currently the only place where 'resolveNames' is used. type = env.resolveNames(getClassDefinition(), type, isSynthetic()); // do the throws also: getExceptions(env); if (isMethod()) { Vector argNames = args; args = null; createArgumentFields(argNames); // Add outer instance argument for constructors. if (isConstructor()) { addOuterThis(); } } } if (tracing) env.dtExit("SourceMember.resolveTypeStructure: " + this); } /** * Get the class declaration in which the field is actually defined */ public ClassDeclaration getDefiningClassDeclaration() { if (abstractSource == null) return super.getDefiningClassDeclaration(); else return abstractSource.getDefiningClassDeclaration(); } /** * A source field never reports deprecation, since the compiler * allows access to deprecated features that are being compiled * in the same job. 
*/ public boolean reportDeprecated(Environment env) { return false; } /** * Check this field. * <p> * This is the method which requests checking. * The real work is done by * <tt>Vset check(Environment, Context, Vset)</tt>. */ public void check(Environment env) throws ClassNotFound { if (tracing) env.dtEnter("SourceMember.check: " + getName() + ", status = " + status); // rely on the class to check all fields in the proper order if (status == PARSED) { if (isSynthetic() && getValue() == null) { // break a big cycle for small synthetic variables status = CHECKED; if (tracing) env.dtExit("SourceMember.check: BREAKING CYCLE"); return; } if (tracing) env.dtEvent("SourceMember.check: CHECKING CLASS"); clazz.check(env); if (status == PARSED) { if (getClassDefinition().getError()) { status = ERROR; } else { if (tracing) env.dtExit("SourceMember.check: CHECK FAILED"); throw new CompilerError("check failed"); } } } if (tracing) env.dtExit("SourceMember.check: DONE " + getName() + ", status = " + status); } /** * Check a field. * @param vset tells which uplevel variables are definitely assigned * The vset is also used to track the initialization of blank finals * by whichever fields which are relevant to them. */ public Vset check(Environment env, Context ctx, Vset vset) throws ClassNotFound { if (tracing) env.dtEvent("SourceMember.check: MEMBER " + getName() + ", status = " + status); if (status == PARSED) { if (isInnerClass()) { // some classes are checked separately ClassDefinition nc = getInnerClass(); if (nc instanceof SourceClass && !nc.isLocal() && nc.isInsideLocal()) { status = CHECKING; vset = ((SourceClass)nc).checkInsideClass(env, ctx, vset); } status = CHECKED; return vset; } if (env.dump()) { System.out.println("[check field " + getClassDeclaration().getName() + "." 
+ getName() + "]"); if (getValue() != null) { getValue().print(System.out); System.out.println(); } } env = new Environment(env, this); // This is where all checking of names appearing within the type // of the member is done. Includes return type and argument types. // Since only one location ('where') for error messages is provided, // localization of errors is poor. Throws clauses are handled below. env.resolve(where, getClassDefinition(), getType()); // Make sure that all the classes that we claim to throw really // are subclasses of Throwable, and are classes that we can reach if (isMethod()) { ClassDeclaration throwable = env.getClassDeclaration(idJavaLangThrowable); ClassDeclaration exp[] = getExceptions(env); for (int i = 0 ; i < exp.length ; i++) { ClassDefinition def; long where = getWhere(); if (expIds != null && i < expIds.length) { where = IdentifierToken.getWhere(expIds[i], where); } try { def = exp[i].getClassDefinition(env); // Validate access for all inner-class components // of a qualified name, not just the last one, which // is checked below. Yes, this is a dirty hack... // Part of fix for 4094658. env.resolveByName(where, getClassDefinition(), def.getName()); } catch (ClassNotFound e) { env.error(where, "class.not.found", e.name, "throws"); break; } def.noteUsedBy(getClassDefinition(), where, env); if (!getClassDefinition(). 
canAccess(env, def.getClassDeclaration())) { env.error(where, "cant.access.class", def); } else if (!def.subClassOf(env, throwable)) { env.error(where, "throws.not.throwable", def); } } } status = CHECKING; if (isMethod() && args != null) { int length = args.size(); outer_loop: for (int i = 0; i < length; i++) { LocalMember lf = (LocalMember)(args.elementAt(i)); Identifier name_i = lf.getName(); for (int j = i + 1; j < length; j++) { LocalMember lf2 = (LocalMember)(args.elementAt(j)); Identifier name_j = lf2.getName(); if (name_i.equals(name_j)) { env.error(lf2.getWhere(), "duplicate.argument", name_i); break outer_loop; } } } } if (getValue() != null) { ctx = new Context(ctx, this); if (isMethod()) { Statement s = (Statement)getValue(); // initialize vset, indication that each of the arguments // to the function has a value for (Enumeration e = args.elements(); e.hasMoreElements();){ LocalMember f = (LocalMember)e.nextElement(); vset.addVar(ctx.declare(env, f)); } if (isConstructor()) { // Undefine "this" in some constructors, until after // the super constructor has been called. vset.clearVar(ctx.getThisNumber()); // If the first thing in the definition isn't a call // to either super() or this(), then insert one. Expression supCall = s.firstConstructor(); if ((supCall == null) && (getClassDefinition().getSuperClass() != null)) { supCall = getDefaultSuperCall(env); Statement scs = new ExpressionStatement(where, supCall); s = Statement.insertStatement(scs, s); setValue(s); } } //System.out.println("VSET = " + vset); ClassDeclaration exp[] = getExceptions(env); int htsize = (exp.length > 3) ? 
17 : 7; Hashtable thrown = new Hashtable(htsize); vset = s.checkMethod(env, ctx, vset, thrown); ClassDeclaration ignore1 = env.getClassDeclaration(idJavaLangError); ClassDeclaration ignore2 = env.getClassDeclaration(idJavaLangRuntimeException); for (Enumeration e = thrown.keys(); e.hasMoreElements();) { ClassDeclaration c = (ClassDeclaration)e.nextElement(); ClassDefinition def = c.getClassDefinition(env); if (def.subClassOf(env, ignore1) || def.subClassOf(env, ignore2)) { continue; } boolean ok = false; if (!isInitializer()) { for (int i = 0 ; i < exp.length ; i++) { if (def.subClassOf(env, exp[i])) { ok = true; } } } if (!ok) { Node n = (Node)thrown.get(c); long where = n.getWhere(); String errorMsg; if (isConstructor()) { if (where == getClassDefinition().getWhere()) { // If this message is being generated for // a default constructor, we should give // a different error message. Currently // we check for this by seeing if the // constructor has the same "where" as // its class. This is a bit kludgy, but // works. (bug id 4034836) errorMsg = "def.constructor.exception"; } else { // Constructor with uncaught exception. errorMsg = "constructor.exception"; } } else if (isInitializer()) { // Initializer with uncaught exception. errorMsg = "initializer.exception"; } else { // Method with uncaught exception. errorMsg = "uncaught.exception"; } env.error(where, errorMsg, c.getName()); } } } else { Hashtable thrown = new Hashtable(3); // small & throw-away Expression val = (Expression)getValue(); vset = val.checkInitializer(env, ctx, vset, getType(), thrown); setValue(val.convert(env, ctx, getType(), val)); // Complain about static final members of inner classes that // do not have an initializer that is a constant expression. // In general, static members are not permitted for inner // classes, but an exception is made for named constants. // Other cases of static members, including non-final ones, // are handled in 'SourceClass'. Part of fix for 4095568. 
if (isStatic() && isFinal() && !clazz.isTopLevel()) { if (!((Expression)getValue()).isConstant()) { env.error(where, "static.inner.field", getName(), this); setValue(null); } } // Both RuntimeExceptions and Errors should be // allowed in initializers. Fix for bug 4102541. ClassDeclaration except = env.getClassDeclaration(idJavaLangThrowable); ClassDeclaration ignore1 = env.getClassDeclaration(idJavaLangError); ClassDeclaration ignore2 = env.getClassDeclaration(idJavaLangRuntimeException); for (Enumeration e = thrown.keys(); e.hasMoreElements(); ) { ClassDeclaration c = (ClassDeclaration)e.nextElement(); ClassDefinition def = c.getClassDefinition(env); if (!def.subClassOf(env, ignore1) && !def.subClassOf(env, ignore2) && def.subClassOf(env, except)) { Node n = (Node)thrown.get(c); env.error(n.getWhere(), "initializer.exception", c.getName()); } } } if (env.dump()) { getValue().print(System.out); System.out.println(); } } status = getClassDefinition().getError() ? ERROR : CHECKED; } // Initializers (static and instance) must be able to complete normally. if (isInitializer() && vset.isDeadEnd()) { env.error(where, "init.no.normal.completion"); vset = vset.clearDeadEnd(); } return vset; } // helper to check(): synthesize a missing super() call private Expression getDefaultSuperCall(Environment env) { Expression se = null; ClassDefinition sclass = getClassDefinition().getSuperClass().getClassDefinition(); // does the superclass constructor require an enclosing instance? ClassDefinition reqc = (sclass == null) ? null : sclass.isTopLevel() ? 
null : sclass.getOuterClass(); ClassDefinition thisc = getClassDefinition(); if (reqc != null && !Context.outerLinkExists(env, reqc, thisc)) { se = new SuperExpression(where, new NullExpression(where)); env.error(where, "no.default.outer.arg", reqc, getClassDefinition()); } if (se == null) { se = new SuperExpression(where); } return new MethodExpression(where, se, idInit, new Expression[0]); } /** * Inline the field */ void inline(Environment env) throws ClassNotFound { switch (status) { case PARSED: check(env); inline(env); break; case CHECKED: if (env.dump()) { System.out.println("[inline field " + getClassDeclaration().getName() + "." + getName() + "]"); } status = INLINING; env = new Environment(env, this); if (isMethod()) { if ((!isNative()) && (!isAbstract())) { Statement s = (Statement)getValue(); Context ctx = new Context((Context)null, this); for (Enumeration e = args.elements() ; e.hasMoreElements() ;) { LocalMember local = (LocalMember)e.nextElement(); ctx.declare(env, local); } setValue(s.inline(env, ctx)); } } else if (isInnerClass()) { // some classes are checked and inlined separately ClassDefinition nc = getInnerClass(); if (nc instanceof SourceClass && !nc.isLocal() && nc.isInsideLocal()) { status = INLINING; ((SourceClass)nc).inlineLocalClass(env); } status = INLINED; break; } else { if (getValue() != null) { Context ctx = new Context((Context)null, this); if (!isStatic()) { // Cf. "thisArg" in SourceClass.checkMembers(). Context ctxInst = new Context(ctx, this); LocalMember thisArg = ((SourceClass)clazz).getThisArgument(); ctxInst.declare(env, thisArg); setValue(((Expression)getValue()) .inlineValue(env, ctxInst)); } else { setValue(((Expression)getValue()) .inlineValue(env, ctx)); } } } if (env.dump()) { System.out.println("[inlined field " + getClassDeclaration().getName() + "." 
+ getName() + "]"); if (getValue() != null) { getValue().print(System.out); System.out.println(); } else { System.out.println("<empty>"); } } status = INLINED; break; } } /** * Get the value of the field (or null if the value can't be determined) */ public Node getValue(Environment env) throws ClassNotFound { Node value = getValue(); if (value != null && status != INLINED) { // be sure to get the imports right: env = ((SourceClass)clazz).setupEnv(env); inline(env); value = (status == INLINED) ? getValue() : null; } return value; } public boolean isInlineable(Environment env, boolean fromFinal) throws ClassNotFound { if (super.isInlineable(env, fromFinal)) { getValue(env); return (status == INLINED) && !getClassDefinition().getError(); } return false; } /** * Get the initial value of the field */ public Object getInitialValue() { if (isMethod() || (getValue() == null) || (!isFinal()) || (status != INLINED)) { return null; } return ((Expression)getValue()).getValue(); } /** * Generate code */ public void code(Environment env, Assembler asm) throws ClassNotFound { switch (status) { case PARSED: check(env); code(env, asm); return; case CHECKED: inline(env); code(env, asm); return; case INLINED: // Actually generate code if (env.dump()) { System.out.println("[code field " + getClassDeclaration().getName() + "." 
+ getName() + "]"); } if (isMethod() && (!isNative()) && (!isAbstract())) { env = new Environment(env, this); Context ctx = new Context((Context)null, this); Statement s = (Statement)getValue(); for (Enumeration e = args.elements() ; e.hasMoreElements() ; ) { LocalMember f = (LocalMember)e.nextElement(); ctx.declare(env, f); //ctx.declare(env, (LocalMember)e.nextElement()); } /* if (isConstructor() && ((s == null) || (s.firstConstructor() == null))) { ClassDeclaration c = getClassDefinition().getSuperClass(); if (c != null) { MemberDefinition field = c.getClassDefinition(env).matchMethod(env, getClassDefinition(), idInit); asm.add(getWhere(), opc_aload, new Integer(0)); asm.add(getWhere(), opc_invokespecial, field); asm.add(getWhere(), opc_pop); } // Output initialization code for (MemberDefinition f = getClassDefinition().getFirstMember() ; f != null ; f = f.getNextMember()) { if (!f.isStatic()) { f.codeInit(env, ctx, asm); } } } */ if (s != null) { s.code(env, ctx, asm); } if (getType().getReturnType().isType(TC_VOID) && !isInitializer()) { asm.add(getWhere(), opc_return, true); } } return; } } public void codeInit(Environment env, Context ctx, Assembler asm) throws ClassNotFound { if (isMethod()) { return; } switch (status) { case PARSED: check(env); codeInit(env, ctx, asm); return; case CHECKED: inline(env); codeInit(env, ctx, asm); return; case INLINED: // Actually generate code if (env.dump()) { System.out.println("[code initializer " + getClassDeclaration().getName() + "." + getName() + "]"); } if (getValue() != null) { Expression e = (Expression)getValue(); // The JLS Section 8.5 specifies that static (non-final) // initializers should be executed in textual order. Eliding // initializations to default values can interfere with this, // so the tests for !e.equalsDefault() have been eliminated, // below. 
if (isStatic()) { if (getInitialValue() == null) { // removed: && !e.equalsDefault()) { e.codeValue(env, ctx, asm); asm.add(getWhere(), opc_putstatic, this); } } else { // removed: if (!e.equalsDefault()) { // This code doesn't appear to be reached for // instance initializers. Code for these is generated // in the makeVarInits() method of the class // MethodExpression. asm.add(getWhere(), opc_aload, new Integer(0)); e.codeValue(env, ctx, asm); asm.add(getWhere(), opc_putfield, this); } } return; } } /** * Print for debugging */ public void print(PrintStream out) { super.print(out); if (getValue() != null) { getValue().print(out); out.println(); } } }
apache/hadoop
35,453
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/tools/CacheAdmin.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.tools; import java.io.IOException; import java.util.EnumSet; import java.util.LinkedList; import java.util.List; import org.apache.commons.text.WordUtils; import org.apache.hadoop.classification.InterfaceAudience; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.conf.Configured; import org.apache.hadoop.fs.CacheFlag; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.RemoteIterator; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.DistributedFileSystem; import org.apache.hadoop.hdfs.protocol.CacheDirectiveEntry; import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo; import org.apache.hadoop.hdfs.protocol.CacheDirectiveInfo.Expiration; import org.apache.hadoop.hdfs.protocol.CacheDirectiveStats; import org.apache.hadoop.hdfs.protocol.CachePoolEntry; import org.apache.hadoop.hdfs.protocol.CachePoolInfo; import org.apache.hadoop.hdfs.protocol.CachePoolStats; import org.apache.hadoop.tools.TableListing; import org.apache.hadoop.tools.TableListing.Justification; import org.apache.hadoop.util.StringUtils; import org.apache.hadoop.util.Tool; import 
org.apache.hadoop.thirdparty.com.google.common.base.Joiner; import org.apache.hadoop.util.ToolRunner; /** * This class implements command-line operations on the HDFS Cache. */ @InterfaceAudience.Private public class CacheAdmin extends Configured implements Tool { public CacheAdmin() { this(null); } public CacheAdmin(Configuration conf) { super(conf); } @Override public int run(String[] args) throws IOException { if (args.length == 0) { AdminHelper.printUsage(false, "cacheadmin", COMMANDS); ToolRunner.printGenericCommandUsage(System.err); return 1; } AdminHelper.Command command = AdminHelper.determineCommand(args[0], COMMANDS); if (command == null) { System.err.println("Can't understand command '" + args[0] + "'"); if (!args[0].startsWith("-")) { System.err.println("Command names must start with dashes."); } AdminHelper.printUsage(false, "cacheadmin", COMMANDS); ToolRunner.printGenericCommandUsage(System.err); return 1; } List<String> argsList = new LinkedList<String>(); for (int j = 1; j < args.length; j++) { argsList.add(args[j]); } try { return command.run(getConf(), argsList); } catch (IllegalArgumentException e) { System.err.println(AdminHelper.prettifyException(e)); return -1; } } public static void main(String[] argsArray) throws Exception { CacheAdmin cacheAdmin = new CacheAdmin(new Configuration()); int res = ToolRunner.run(cacheAdmin, argsArray); System.exit(res); } private static CacheDirectiveInfo.Expiration parseExpirationString(String ttlString) throws IOException { CacheDirectiveInfo.Expiration ex = null; if (ttlString != null) { if (ttlString.equalsIgnoreCase("never")) { ex = CacheDirectiveInfo.Expiration.NEVER; } else { long ttl = DFSUtil.parseRelativeTime(ttlString); ex = CacheDirectiveInfo.Expiration.newRelative(ttl); } } return ex; } private static class AddCacheDirectiveInfoCommand implements AdminHelper.Command { @Override public String getName() { return "-addDirective"; } @Override public String getShortUsage() { return "[" + getName() + " 
-path <path> -pool <pool-name> " + "[-force] " + "[-replication <replication>] [-ttl <time-to-live>]]\n"; } @Override public String getLongUsage() { TableListing listing = AdminHelper.getOptionDescriptionListing(); listing.addRow("<path>", "A path to cache. The path can be " + "a directory or a file."); listing.addRow("<pool-name>", "The pool to which the directive will be " + "added. You must have write permission on the cache pool " + "in order to add new directives."); listing.addRow("-force", "Skips checking of cache pool resource limits."); listing.addRow("<replication>", "The cache replication factor to use. " + "Defaults to 1."); listing.addRow("<time-to-live>", "How long the directive is " + "valid. Can be specified in minutes, hours, and days, e.g. " + "30m, 4h, 2d. Valid units are [smhd]." + " \"never\" indicates a directive that never expires." + " If unspecified, the directive never expires."); return getShortUsage() + "\n" + "Add a new cache directive.\n\n" + listing.toString(); } @Override public int run(Configuration conf, List<String> args) throws IOException { CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder(); String path = StringUtils.popOptionWithArgument("-path", args); if (path == null) { System.err.println("You must specify a path with -path."); return 1; } builder.setPath(new Path(path)); String poolName = StringUtils.popOptionWithArgument("-pool", args); if (poolName == null) { System.err.println("You must specify a pool name with -pool."); return 1; } builder.setPool(poolName); boolean force = StringUtils.popOption("-force", args); String replicationString = StringUtils.popOptionWithArgument("-replication", args); if (replicationString != null) { Short replication = Short.parseShort(replicationString); builder.setReplication(replication); } String ttlString = StringUtils.popOptionWithArgument("-ttl", args); try { Expiration ex = parseExpirationString(ttlString); if (ex != null) { builder.setExpiration(ex); } } catch 
(IOException e) { System.err.println( "Error while parsing ttl value: " + e.getMessage()); return 1; } if (!args.isEmpty()) { System.err.println("Can't understand argument: " + args.get(0)); return 1; } DistributedFileSystem dfs = AdminHelper.getDFS(new Path(path).toUri(), conf); CacheDirectiveInfo directive = builder.build(); EnumSet<CacheFlag> flags = EnumSet.noneOf(CacheFlag.class); if (force) { flags.add(CacheFlag.FORCE); } try { long id = dfs.addCacheDirective(directive, flags); System.out.println("Added cache directive " + id); } catch (IOException e) { System.err.println(AdminHelper.prettifyException(e)); return 2; } return 0; } } private static class RemoveCacheDirectiveInfoCommand implements AdminHelper.Command { @Override public String getName() { return "-removeDirective"; } @Override public String getShortUsage() { return "[" + getName() + " <id>]\n"; } @Override public String getLongUsage() { TableListing listing = AdminHelper.getOptionDescriptionListing(); listing.addRow("<id>", "The id of the cache directive to remove. " + "You must have write permission on the pool of the " + "directive in order to remove it. 
To see a list " + "of cache directive IDs, use the -listDirectives command."); return getShortUsage() + "\n" + "Remove a cache directive.\n\n" + listing.toString(); } @Override public int run(Configuration conf, List<String> args) throws IOException { String idString= StringUtils.popFirstNonOption(args); if (idString == null) { System.err.println("You must specify a directive ID to remove."); return 1; } long id; try { id = Long.parseLong(idString); } catch (NumberFormatException e) { System.err.println("Invalid directive ID " + idString + ": expected " + "a numeric value."); return 1; } if (id <= 0) { System.err.println("Invalid directive ID " + id + ": ids must " + "be greater than 0."); return 1; } if (!args.isEmpty()) { System.err.println("Can't understand argument: " + args.get(0)); System.err.println("Usage is " + getShortUsage()); return 1; } DistributedFileSystem dfs = AdminHelper.getDFS(conf); try { dfs.getClient().removeCacheDirective(id); System.out.println("Removed cached directive " + id); } catch (IOException e) { System.err.println(AdminHelper.prettifyException(e)); return 2; } return 0; } } private static class ModifyCacheDirectiveInfoCommand implements AdminHelper.Command { @Override public String getName() { return "-modifyDirective"; } @Override public String getShortUsage() { return "[" + getName() + " -id <id> [-path <path>] [-force] [-replication <replication>] " + "[-pool <pool-name>] [-ttl <time-to-live>]]\n"; } @Override public String getLongUsage() { TableListing listing = AdminHelper.getOptionDescriptionListing(); listing.addRow("<id>", "The ID of the directive to modify (required)"); listing.addRow("<path>", "A path to cache. The path can be " + "a directory or a file. (optional)"); listing.addRow("-force", "Skips checking of cache pool resource limits."); listing.addRow("<replication>", "The cache replication factor to use. " + "(optional)"); listing.addRow("<pool-name>", "The pool to which the directive will be " + "added. 
You must have write permission on the cache pool " + "in order to move a directive into it. (optional)"); listing.addRow("<time-to-live>", "How long the directive is " + "valid. Can be specified in minutes, hours, and days, e.g. " + "30m, 4h, 2d. Valid units are [smhd]." + " \"never\" indicates a directive that never expires."); return getShortUsage() + "\n" + "Modify a cache directive.\n\n" + listing.toString(); } @Override public int run(Configuration conf, List<String> args) throws IOException { CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder(); boolean modified = false; String idString = StringUtils.popOptionWithArgument("-id", args); if (idString == null) { System.err.println("You must specify a directive ID with -id."); return 1; } builder.setId(Long.parseLong(idString)); String path = StringUtils.popOptionWithArgument("-path", args); if (path != null) { builder.setPath(new Path(path)); modified = true; } boolean force = StringUtils.popOption("-force", args); String replicationString = StringUtils.popOptionWithArgument("-replication", args); if (replicationString != null) { builder.setReplication(Short.parseShort(replicationString)); modified = true; } String poolName = StringUtils.popOptionWithArgument("-pool", args); if (poolName != null) { builder.setPool(poolName); modified = true; } String ttlString = StringUtils.popOptionWithArgument("-ttl", args); try { Expiration ex = parseExpirationString(ttlString); if (ex != null) { builder.setExpiration(ex); modified = true; } } catch (IOException e) { System.err.println( "Error while parsing ttl value: " + e.getMessage()); return 1; } if (!args.isEmpty()) { System.err.println("Can't understand argument: " + args.get(0)); System.err.println("Usage is " + getShortUsage()); return 1; } if (!modified) { System.err.println("No modifications were specified."); return 1; } DistributedFileSystem dfs = AdminHelper.getDFS(conf); EnumSet<CacheFlag> flags = EnumSet.noneOf(CacheFlag.class); if (force) { 
flags.add(CacheFlag.FORCE); } try { dfs.modifyCacheDirective(builder.build(), flags); System.out.println("Modified cache directive " + idString); } catch (IOException e) { System.err.println(AdminHelper.prettifyException(e)); return 2; } return 0; } } private static class RemoveCacheDirectiveInfosCommand implements AdminHelper.Command { @Override public String getName() { return "-removeDirectives"; } @Override public String getShortUsage() { return "[" + getName() + " -path <path>]\n"; } @Override public String getLongUsage() { TableListing listing = AdminHelper.getOptionDescriptionListing(); listing.addRow("-path <path>", "The path of the cache directives to remove. " + "You must have write permission on the pool of the directive in order " + "to remove it. To see a list of cache directives, use the " + "-listDirectives command."); return getShortUsage() + "\n" + "Remove every cache directive with the specified path.\n\n" + listing.toString(); } @Override public int run(Configuration conf, List<String> args) throws IOException { String path = StringUtils.popOptionWithArgument("-path", args); if (path == null) { System.err.println("You must specify a path with -path."); return 1; } if (!args.isEmpty()) { System.err.println("Can't understand argument: " + args.get(0)); System.err.println("Usage is " + getShortUsage()); return 1; } int exitCode = 0; try { DistributedFileSystem dfs = AdminHelper.getDFS(new Path(path).toUri(), conf); RemoteIterator<CacheDirectiveEntry> iter = dfs.listCacheDirectives( new CacheDirectiveInfo.Builder(). 
            setPath(new Path(path)).build());
        // Remove every directive the iterator yields; keep going after a
        // per-directive failure so one bad entry doesn't stop the sweep.
        while (iter.hasNext()) {
          CacheDirectiveEntry entry = iter.next();
          try {
            dfs.removeCacheDirective(entry.getInfo().getId());
            System.out.println("Removed cache directive " +
                entry.getInfo().getId());
          } catch (IOException e) {
            System.err.println(AdminHelper.prettifyException(e));
            exitCode = 2;  // remember the failure, but continue
          }
        }
      } catch (IOException e) {
        System.err.println(AdminHelper.prettifyException(e));
        exitCode = 2;
      }
      // Only claim complete success when no removal failed.
      if (exitCode == 0) {
        System.out.println("Removed every cache directive with path " + path);
      }
      return exitCode;
    }
  }

  /**
   * Implements "hdfs cacheadmin -listDirectives": prints cache directives,
   * optionally filtered by -path/-pool/-id, with -stats for statistics.
   */
  private static class ListCacheDirectiveInfoCommand
      implements AdminHelper.Command {
    @Override
    public String getName() {
      return "-listDirectives";
    }

    @Override
    public String getShortUsage() {
      return "[" + getName() +
          " [-stats] [-path <path>] [-pool <pool>] [-id <id>]" +
          "]\n";
    }

    @Override
    public String getLongUsage() {
      TableListing listing = AdminHelper.getOptionDescriptionListing();
      listing.addRow("-stats", "List path-based cache directive statistics.");
      listing.addRow("<path>", "List only " +
          "cache directives with this path. " +
          "Note that if there is a cache directive for <path> " +
          "in a cache pool that we don't have read access for, it " +
          "will not be listed.");
      listing.addRow("<pool>", "List only path cache directives in that pool.");
      listing.addRow("<id>", "List the cache directive with this id.");
      return getShortUsage() + "\n" +
          "List cache directives.\n\n" +
          listing.toString();
    }

    /**
     * Lists directives matching the optional filters.
     * Returns 0 on success, 1 on bad arguments, 2 on an RPC failure.
     */
    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
      CacheDirectiveInfo.Builder builder = new CacheDirectiveInfo.Builder();
      String pathFilter = StringUtils.popOptionWithArgument("-path", args);
      if (pathFilter != null) {
        builder.setPath(new Path(pathFilter));
      }
      String poolFilter = StringUtils.popOptionWithArgument("-pool", args);
      if (poolFilter != null) {
        builder.setPool(poolFilter);
      }
      boolean printStats = StringUtils.popOption("-stats", args);
      String idFilter = StringUtils.popOptionWithArgument("-id", args);
      if (idFilter != null) {
        builder.setId(Long.parseLong(idFilter));
      }
      if (!args.isEmpty()) {
        System.err.println("Can't understand argument: " + args.get(0));
        return 1;
      }
      // Base columns; the stats columns are appended only with -stats.
      TableListing.Builder tableBuilder = new TableListing.Builder().
          addField("ID", Justification.RIGHT).
          addField("POOL", Justification.LEFT).
          addField("REPL", Justification.RIGHT).
          addField("EXPIRY", Justification.LEFT).
          addField("PATH", Justification.LEFT);
      if (printStats) {
        tableBuilder.addField("BYTES_NEEDED", Justification.RIGHT).
            addField("BYTES_CACHED", Justification.RIGHT).
            addField("FILES_NEEDED", Justification.RIGHT).
            addField("FILES_CACHED", Justification.RIGHT);
      }
      TableListing tableListing = tableBuilder.build();
      try {
        DistributedFileSystem dfs = AdminHelper.getDFS(conf);
        RemoteIterator<CacheDirectiveEntry> iter =
            dfs.listCacheDirectives(builder.build());
        int numEntries = 0;
        while (iter.hasNext()) {
          CacheDirectiveEntry entry = iter.next();
          CacheDirectiveInfo directive = entry.getInfo();
          CacheDirectiveStats stats = entry.getStats();
          List<String> row = new LinkedList<String>();
          row.add("" + directive.getId());
          row.add(directive.getPool());
          row.add("" + directive.getReplication());
          String expiry;
          // This is effectively never, round for nice printing
          if (directive.getExpiration().getMillis() >
              Expiration.MAX_RELATIVE_EXPIRY_MS / 2) {
            expiry = "never";
          } else {
            expiry = directive.getExpiration().toString();
          }
          row.add(expiry);
          row.add(directive.getPath().toUri().getPath());
          if (printStats) {
            row.add("" + stats.getBytesNeeded());
            row.add("" + stats.getBytesCached());
            row.add("" + stats.getFilesNeeded());
            row.add("" + stats.getFilesCached());
          }
          tableListing.addRow(row.toArray(new String[row.size()]));
          numEntries++;
        }
        // "entry" vs "entries" depending on the count.
        System.out.print(String.format("Found %d entr%s%n",
            numEntries, numEntries == 1 ? "y" : "ies"));
        if (numEntries > 0) {
          System.out.print(tableListing);
        }
      } catch (IOException e) {
        System.err.println(AdminHelper.prettifyException(e));
        return 2;
      }
      return 0;
    }
  }

  /** Implements "hdfs cacheadmin -addPool": creates a new cache pool. */
  private static class AddCachePoolCommand implements AdminHelper.Command {
    private static final String NAME = "-addPool";

    @Override
    public String getName() {
      return NAME;
    }

    @Override
    public String getShortUsage() {
      return "[" + NAME + " <name> [-owner <owner>] " +
          "[-group <group>] [-mode <mode>] [-limit <limit>] " +
          "[-defaultReplication <defaultReplication>] [-maxTtl <maxTtl>]" +
          "]\n";
    }

    @Override
    public String getLongUsage() {
      TableListing listing = AdminHelper.getOptionDescriptionListing();
      listing.addRow("<name>", "Name of the new pool.");
      listing.addRow("<owner>", "Username of the owner of the pool. 
" + "Defaults to the current user.");
      listing.addRow("<group>", "Group of the pool. " +
          "Defaults to the primary group name of the current user.");
      listing.addRow("<mode>", "UNIX-style permissions for the pool. " +
          "Permissions are specified in octal, e.g. 0755. " +
          "By default, this is set to " + String.format("0%03o",
          FsPermission.getCachePoolDefault().toShort()) + ".");
      listing.addRow("<limit>", "The maximum number of bytes that can be " +
          "cached by directives in this pool, in aggregate. By default, " +
          "no limit is set.");
      listing.addRow("<defaultReplication>", "The default replication " +
          "number for cache directive in the pool. " +
          "If not set, the replication is set to 1");
      listing.addRow("<maxTtl>", "The maximum allowed time-to-live for " +
          "directives being added to the pool. This can be specified in " +
          "seconds, minutes, hours, and days, e.g. 120s, 30m, 4h, 2d. " +
          "Valid units are [smhd]. By default, no maximum is set. " +
          "A value of \"never\" specifies that there is no limit.");
      return getShortUsage() + "\n" +
          "Add a new cache pool.\n\n" +
          listing.toString();
    }

    /**
     * Creates the pool described by the parsed options.
     * Returns 0 on success, 1 on bad arguments, 2 on an RPC failure.
     */
    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
      String name = StringUtils.popFirstNonOption(args);
      if (name == null) {
        System.err.println("You must specify a name when creating a " +
            "cache pool.");
        return 1;
      }
      CachePoolInfo info = new CachePoolInfo(name);
      String owner = StringUtils.popOptionWithArgument("-owner", args);
      if (owner != null) {
        info.setOwnerName(owner);
      }
      String group = StringUtils.popOptionWithArgument("-group", args);
      if (group != null) {
        info.setGroupName(group);
      }
      String modeString = StringUtils.popOptionWithArgument("-mode", args);
      if (modeString != null) {
        // Mode is given in octal, e.g. 0755.
        short mode = Short.parseShort(modeString, 8);
        info.setMode(new FsPermission(mode));
      }
      String limitString = StringUtils.popOptionWithArgument("-limit", args);
      Long limit = AdminHelper.parseLimitString(limitString);
      if (limit != null) {
        info.setLimit(limit);
      }
      String replicationString = StringUtils.
          popOptionWithArgument("-defaultReplication", args);
      if (replicationString != null) {
        short defaultReplication = Short.parseShort(replicationString);
        info.setDefaultReplication(defaultReplication);
      }
      String maxTtlString = StringUtils.popOptionWithArgument("-maxTtl", args);
      try {
        Long maxTtl = AdminHelper.parseTtlString(maxTtlString);
        if (maxTtl != null) {
          info.setMaxRelativeExpiryMs(maxTtl);
        }
      } catch (IOException e) {
        System.err.println(
            "Error while parsing maxTtl value: " + e.getMessage());
        return 1;
      }
      if (!args.isEmpty()) {
        System.err.print("Can't understand arguments: " +
            Joiner.on(" ").join(args) + "\n");
        System.err.println("Usage is " + getShortUsage());
        return 1;
      }
      DistributedFileSystem dfs = AdminHelper.getDFS(conf);
      try {
        dfs.addCachePool(info);
      } catch (IOException e) {
        System.err.println(AdminHelper.prettifyException(e));
        return 2;
      }
      System.out.println("Successfully added cache pool " + name + ".");
      return 0;
    }
  }

  /** Implements "hdfs cacheadmin -modifyPool": updates pool metadata. */
  private static class ModifyCachePoolCommand implements AdminHelper.Command {
    @Override
    public String getName() {
      return "-modifyPool";
    }

    @Override
    public String getShortUsage() {
      return "[" + getName() + " <name> [-owner <owner>] " +
          "[-group <group>] [-mode <mode>] [-limit <limit>] " +
          "[-defaultReplication <defaultReplication>] [-maxTtl <maxTtl>]]\n";
    }

    @Override
    public String getLongUsage() {
      TableListing listing = AdminHelper.getOptionDescriptionListing();
      listing.addRow("<name>", "Name of the pool to modify.");
      listing.addRow("<owner>", "Username of the owner of the pool");
      listing.addRow("<group>", "Groupname of the group of the pool.");
      listing.addRow("<mode>", "Unix-style permissions of the pool in octal.");
      listing.addRow("<limit>", "Maximum number of bytes that can be cached " +
          "by this pool.");
      listing.addRow("<defaultReplication>", "Default replication num for " +
          "directives in this pool");
      listing.addRow("<maxTtl>", "The maximum allowed time-to-live for " +
          "directives being added to the pool.");
      return getShortUsage() + "\n" +
          WordUtils.wrap("Modifies the metadata of an existing cache pool. " +
              "See usage of " + AddCachePoolCommand.NAME + " for more details.",
              AdminHelper.MAX_LINE_WIDTH) + "\n\n" +
          listing.toString();
    }

    /**
     * Applies any of -owner/-group/-mode/-limit/-defaultReplication/-maxTtl
     * to the named pool. Returns 0 on success, 1 on bad arguments (or when
     * no attribute option was supplied at all), 2 on an RPC failure.
     */
    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
      String owner = StringUtils.popOptionWithArgument("-owner", args);
      String group = StringUtils.popOptionWithArgument("-group", args);
      String modeString = StringUtils.popOptionWithArgument("-mode", args);
      Integer mode = (modeString == null) ?
          null : Integer.parseInt(modeString, 8);
      String limitString = StringUtils.popOptionWithArgument("-limit", args);
      Long limit = AdminHelper.parseLimitString(limitString);
      String replicationString =
          StringUtils.popOptionWithArgument("-defaultReplication", args);
      Short defaultReplication = null;
      if (replicationString != null) {
        defaultReplication = Short.parseShort(replicationString);
      }
      String maxTtlString = StringUtils.popOptionWithArgument("-maxTtl", args);
      Long maxTtl;
      try {
        maxTtl = AdminHelper.parseTtlString(maxTtlString);
      } catch (IOException e) {
        System.err.println(
            "Error while parsing maxTtl value: " + e.getMessage());
        return 1;
      }
      String name = StringUtils.popFirstNonOption(args);
      if (name == null) {
        // NOTE(review): the message says "creating" but this is -modifyPool;
        // looks copy-pasted from AddCachePoolCommand — confirm before fixing.
        System.err.println("You must specify a name when creating a " +
            "cache pool.");
        return 1;
      }
      if (!args.isEmpty()) {
        System.err.print("Can't understand arguments: " +
            Joiner.on(" ").join(args) + "\n");
        System.err.println("Usage is " + getShortUsage());
        return 1;
      }
      // Track whether at least one attribute option was supplied.
      boolean changed = false;
      CachePoolInfo info = new CachePoolInfo(name);
      if (owner != null) {
        info.setOwnerName(owner);
        changed = true;
      }
      if (group != null) {
        info.setGroupName(group);
        changed = true;
      }
      if (mode != null) {
        info.setMode(new FsPermission(mode.shortValue()));
        changed = true;
      }
      if (limit != null) {
        info.setLimit(limit);
        changed = true;
      }
      if (defaultReplication != null) {
        info.setDefaultReplication(defaultReplication);
        changed = true;
      }
      if (maxTtl != null) { 
info.setMaxRelativeExpiryMs(maxTtl); changed = true; } if (!changed) { System.err.println("You must specify at least one attribute to " + "change in the cache pool."); return 1; } DistributedFileSystem dfs = AdminHelper.getDFS(conf); try { dfs.modifyCachePool(info); } catch (IOException e) { System.err.println(AdminHelper.prettifyException(e)); return 2; } System.out.print("Successfully modified cache pool " + name); String prefix = " to have "; if (owner != null) { System.out.print(prefix + "owner name " + owner); prefix = " and "; } if (group != null) { System.out.print(prefix + "group name " + group); prefix = " and "; } if (mode != null) { System.out.print(prefix + "mode " + new FsPermission(mode.shortValue())); prefix = " and "; } if (limit != null) { System.out.print(prefix + "limit " + limit); prefix = " and "; } if (defaultReplication != null) { System.out.println(prefix + "replication " + defaultReplication); prefix = " replication "; } if (maxTtl != null) { System.out.print(prefix + "max time-to-live " + maxTtlString); } System.out.print("\n"); return 0; } } private static class RemoveCachePoolCommand implements AdminHelper.Command { @Override public String getName() { return "-removePool"; } @Override public String getShortUsage() { return "[" + getName() + " <name>]\n"; } @Override public String getLongUsage() { return getShortUsage() + "\n" + WordUtils.wrap("Remove a cache pool. 
This also uncaches paths " +
              "associated with the pool.\n\n", AdminHelper.MAX_LINE_WIDTH) +
          "<name> Name of the cache pool to remove.\n";
    }

    /**
     * Removes the named pool.
     * Returns 0 on success, 1 on bad arguments, 2 on an RPC failure.
     */
    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
      String name = StringUtils.popFirstNonOption(args);
      if (name == null) {
        System.err.println("You must specify a name when deleting a " +
            "cache pool.");
        return 1;
      }
      if (!args.isEmpty()) {
        System.err.print("Can't understand arguments: " +
            Joiner.on(" ").join(args) + "\n");
        System.err.println("Usage is " + getShortUsage());
        return 1;
      }
      DistributedFileSystem dfs = AdminHelper.getDFS(conf);
      try {
        dfs.removeCachePool(name);
      } catch (IOException e) {
        System.err.println(AdminHelper.prettifyException(e));
        return 2;
      }
      System.out.println("Successfully removed cache pool " + name + ".");
      return 0;
    }
  }

  /**
   * Implements "hdfs cacheadmin -listPools": prints cache pools, optionally
   * restricted to one named pool, with -stats for statistics.
   */
  private static class ListCachePoolsCommand implements AdminHelper.Command {
    @Override
    public String getName() {
      return "-listPools";
    }

    @Override
    public String getShortUsage() {
      return "[" + getName() + " [-stats] [<name>]]\n";
    }

    @Override
    public String getLongUsage() {
      TableListing listing = AdminHelper.getOptionDescriptionListing();
      listing.addRow("-stats", "Display additional cache pool statistics.");
      listing.addRow("<name>", "If specified, list only the named cache pool.");
      return getShortUsage() + "\n" +
          WordUtils.wrap("Display information about one or more cache pools, " +
              "e.g. name, owner, group, permissions, etc.",
              AdminHelper.MAX_LINE_WIDTH) + "\n\n" +
          listing.toString();
    }

    /**
     * Lists pools (all of them, or just <name>).
     * Returns 0 on success, 1 on bad arguments, 2 on an RPC failure.
     */
    @Override
    public int run(Configuration conf, List<String> args) throws IOException {
      String name = StringUtils.popFirstNonOption(args);
      final boolean printStats = StringUtils.popOption("-stats", args);
      if (!args.isEmpty()) {
        System.err.print("Can't understand arguments: " +
            Joiner.on(" ").join(args) + "\n");
        System.err.println("Usage is " + getShortUsage());
        return 1;
      }
      DistributedFileSystem dfs = AdminHelper.getDFS(conf);
      // Base columns; the stats columns are appended only with -stats.
      TableListing.Builder builder = new TableListing.Builder().
          addField("NAME", Justification.LEFT).
          addField("OWNER", Justification.LEFT).
          addField("GROUP", Justification.LEFT).
          addField("MODE", Justification.LEFT).
          addField("LIMIT", Justification.RIGHT).
          addField("MAXTTL", Justification.RIGHT).
          addField("DEFAULT_REPLICATION", Justification.RIGHT);
      if (printStats) {
        builder.
            addField("BYTES_NEEDED", Justification.RIGHT).
            addField("BYTES_CACHED", Justification.RIGHT).
            addField("BYTES_OVERLIMIT", Justification.RIGHT).
            addField("FILES_NEEDED", Justification.RIGHT).
            addField("FILES_CACHED", Justification.RIGHT);
      }
      TableListing listing = builder.build();
      int numResults = 0;
      try {
        RemoteIterator<CachePoolEntry> iter = dfs.listCachePools();
        while (iter.hasNext()) {
          CachePoolEntry entry = iter.next();
          CachePoolInfo info = entry.getInfo();
          LinkedList<String> row = new LinkedList<String>();
          if (name == null || info.getPoolName().equals(name)) {
            row.add(info.getPoolName());
            row.add(info.getOwnerName());
            row.add(info.getGroupName());
            row.add(info.getMode() != null ?
                info.getMode().toString() : null);
            Long limit = info.getLimit();
            String limitString;
            if (limit != null && limit.equals(CachePoolInfo.LIMIT_UNLIMITED)) {
              limitString = "unlimited";
            } else {
              limitString = "" + limit;
            }
            row.add(limitString);
            Long maxTtl = info.getMaxRelativeExpiryMs();
            String maxTtlString = null;
            if (maxTtl != null) {
              if (maxTtl == CachePoolInfo.RELATIVE_EXPIRY_NEVER) {
                maxTtlString = "never";
              } else {
                maxTtlString = DFSUtil.durationToString(maxTtl);
              }
            }
            row.add(maxTtlString);
            row.add("" + info.getDefaultReplication());
            if (printStats) {
              CachePoolStats stats = entry.getStats();
              row.add(Long.toString(stats.getBytesNeeded()));
              row.add(Long.toString(stats.getBytesCached()));
              row.add(Long.toString(stats.getBytesOverlimit()));
              row.add(Long.toString(stats.getFilesNeeded()));
              row.add(Long.toString(stats.getFilesCached()));
            }
            listing.addRow(row.toArray(new String[row.size()]));
            ++numResults;
            // Pool names are unique; stop after the first match.
            if (name != null) {
              break;
            }
          }
          // NOTE(review): this chunk appears to be one closing brace short
          // here (while/try not both closed) — verify against upstream.
        } catch (IOException e) {
        System.err.println(AdminHelper.prettifyException(e));
        return 2;
      }
      System.out.print(String.format("Found %d result%s.%n", numResults,
          (numResults == 1 ? "" : "s")));
      if (numResults > 0) {
        System.out.print(listing);
      }
      // If list pools succeed, we return 0 (success exit code)
      return 0;
    }
  }

  /** All cacheadmin subcommands, in the order shown in help output. */
  private static final AdminHelper.Command[] COMMANDS = {
      new AddCacheDirectiveInfoCommand(),
      new ModifyCacheDirectiveInfoCommand(),
      new ListCacheDirectiveInfoCommand(),
      new RemoveCacheDirectiveInfoCommand(),
      new RemoveCacheDirectiveInfosCommand(),
      new AddCachePoolCommand(),
      new ModifyCachePoolCommand(),
      new RemoveCachePoolCommand(),
      new ListCachePoolsCommand()
  };
}
googleapis/google-cloud-java
35,986
java-retail/proto-google-cloud-retail-v2alpha/src/main/java/com/google/cloud/retail/v2alpha/ListMerchantCenterAccountLinksResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/retail/v2alpha/merchant_center_account_link_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.retail.v2alpha; /** * * * <pre> * Response for * [MerchantCenterAccountLinkService.ListMerchantCenterAccountLinks][google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.ListMerchantCenterAccountLinks] * method. * </pre> * * Protobuf type {@code google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse} */ public final class ListMerchantCenterAccountLinksResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse) ListMerchantCenterAccountLinksResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListMerchantCenterAccountLinksResponse.newBuilder() to construct. 
private ListMerchantCenterAccountLinksResponse( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListMerchantCenterAccountLinksResponse() { merchantCenterAccountLinks_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListMerchantCenterAccountLinksResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.retail.v2alpha.MerchantCenterAccountLinkServiceProto .internal_static_google_cloud_retail_v2alpha_ListMerchantCenterAccountLinksResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.retail.v2alpha.MerchantCenterAccountLinkServiceProto .internal_static_google_cloud_retail_v2alpha_ListMerchantCenterAccountLinksResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse.class, com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse.Builder.class); } public static final int MERCHANT_CENTER_ACCOUNT_LINKS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.retail.v2alpha.MerchantCenterAccountLink> merchantCenterAccountLinks_; /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ @java.lang.Override public java.util.List<com.google.cloud.retail.v2alpha.MerchantCenterAccountLink> getMerchantCenterAccountLinksList() { return merchantCenterAccountLinks_; } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ @java.lang.Override public java.util.List< ? 
extends com.google.cloud.retail.v2alpha.MerchantCenterAccountLinkOrBuilder> getMerchantCenterAccountLinksOrBuilderList() { return merchantCenterAccountLinks_; } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ @java.lang.Override public int getMerchantCenterAccountLinksCount() { return merchantCenterAccountLinks_.size(); } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ @java.lang.Override public com.google.cloud.retail.v2alpha.MerchantCenterAccountLink getMerchantCenterAccountLinks( int index) { return merchantCenterAccountLinks_.get(index); } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ @java.lang.Override public com.google.cloud.retail.v2alpha.MerchantCenterAccountLinkOrBuilder getMerchantCenterAccountLinksOrBuilder(int index) { return merchantCenterAccountLinks_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < merchantCenterAccountLinks_.size(); i++) { output.writeMessage(1, merchantCenterAccountLinks_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < merchantCenterAccountLinks_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, merchantCenterAccountLinks_.get(i)); } size += 
getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse)) { return super.equals(obj); } com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse other = (com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse) obj; if (!getMerchantCenterAccountLinksList().equals(other.getMerchantCenterAccountLinksList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getMerchantCenterAccountLinksCount() > 0) { hash = (37 * hash) + MERCHANT_CENTER_ACCOUNT_LINKS_FIELD_NUMBER; hash = (53 * hash) + getMerchantCenterAccountLinksList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse parseFrom( com.google.protobuf.ByteString data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response for * [MerchantCenterAccountLinkService.ListMerchantCenterAccountLinks][google.cloud.retail.v2alpha.MerchantCenterAccountLinkService.ListMerchantCenterAccountLinks] * method. 
* </pre> * * Protobuf type {@code google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse) com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.retail.v2alpha.MerchantCenterAccountLinkServiceProto .internal_static_google_cloud_retail_v2alpha_ListMerchantCenterAccountLinksResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.retail.v2alpha.MerchantCenterAccountLinkServiceProto .internal_static_google_cloud_retail_v2alpha_ListMerchantCenterAccountLinksResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse.class, com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse.Builder.class); } // Construct using // com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (merchantCenterAccountLinksBuilder_ == null) { merchantCenterAccountLinks_ = java.util.Collections.emptyList(); } else { merchantCenterAccountLinks_ = null; merchantCenterAccountLinksBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.retail.v2alpha.MerchantCenterAccountLinkServiceProto .internal_static_google_cloud_retail_v2alpha_ListMerchantCenterAccountLinksResponse_descriptor; 
} @java.lang.Override public com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse getDefaultInstanceForType() { return com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse .getDefaultInstance(); } @java.lang.Override public com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse build() { com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse buildPartial() { com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse result = new com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse result) { if (merchantCenterAccountLinksBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { merchantCenterAccountLinks_ = java.util.Collections.unmodifiableList(merchantCenterAccountLinks_); bitField0_ = (bitField0_ & ~0x00000001); } result.merchantCenterAccountLinks_ = merchantCenterAccountLinks_; } else { result.merchantCenterAccountLinks_ = merchantCenterAccountLinksBuilder_.build(); } } private void buildPartial0( com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse result) { int from_bitField0_ = bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder 
// NOTE(review): machine-generated protobuf code (protoc output) for
// com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse.
// This span is the tail of that message class: the remainder of its Builder
// (generic field overrides, mergeFrom for both Message and wire input,
// accessors for the repeated field `merchant_center_account_links` = 1),
// then the DEFAULT_INSTANCE singleton and the PARSER. Generated files must
// not be hand-edited; regenerate from the .proto instead.
// NOTE(review): the text reached us with the generator's newlines collapsed,
// so `//` end-of-line comments below swallow trailing tokens on the same
// physical line — restore the original line breaks (re-run protoc) before
// attempting to compile this file.
clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse) { return mergeFrom( (com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse other) { if (other == com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse .getDefaultInstance()) return this; if (merchantCenterAccountLinksBuilder_ == null) { if (!other.merchantCenterAccountLinks_.isEmpty()) { if (merchantCenterAccountLinks_.isEmpty()) { merchantCenterAccountLinks_ = other.merchantCenterAccountLinks_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureMerchantCenterAccountLinksIsMutable(); merchantCenterAccountLinks_.addAll(other.merchantCenterAccountLinks_); } onChanged(); } } else { if (!other.merchantCenterAccountLinks_.isEmpty()) { if (merchantCenterAccountLinksBuilder_.isEmpty()) { merchantCenterAccountLinksBuilder_.dispose(); merchantCenterAccountLinksBuilder_ = null; merchantCenterAccountLinks_ = other.merchantCenterAccountLinks_; bitField0_ = (bitField0_ & ~0x00000001); merchantCenterAccountLinksBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getMerchantCenterAccountLinksFieldBuilder() : null; } else { merchantCenterAccountLinksBuilder_.addAllMessages(other.merchantCenterAccountLinks_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.retail.v2alpha.MerchantCenterAccountLink m = input.readMessage( com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.parser(), extensionRegistry); if (merchantCenterAccountLinksBuilder_ == null) { ensureMerchantCenterAccountLinksIsMutable(); merchantCenterAccountLinks_.add(m); } else { merchantCenterAccountLinksBuilder_.addMessage(m); } break; } // case 10 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.retail.v2alpha.MerchantCenterAccountLink> merchantCenterAccountLinks_ = java.util.Collections.emptyList(); private void ensureMerchantCenterAccountLinksIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { merchantCenterAccountLinks_ = new java.util.ArrayList<com.google.cloud.retail.v2alpha.MerchantCenterAccountLink>( merchantCenterAccountLinks_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.retail.v2alpha.MerchantCenterAccountLink, 
com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.Builder, com.google.cloud.retail.v2alpha.MerchantCenterAccountLinkOrBuilder> merchantCenterAccountLinksBuilder_; /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public java.util.List<com.google.cloud.retail.v2alpha.MerchantCenterAccountLink> getMerchantCenterAccountLinksList() { if (merchantCenterAccountLinksBuilder_ == null) { return java.util.Collections.unmodifiableList(merchantCenterAccountLinks_); } else { return merchantCenterAccountLinksBuilder_.getMessageList(); } } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public int getMerchantCenterAccountLinksCount() { if (merchantCenterAccountLinksBuilder_ == null) { return merchantCenterAccountLinks_.size(); } else { return merchantCenterAccountLinksBuilder_.getCount(); } } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public com.google.cloud.retail.v2alpha.MerchantCenterAccountLink getMerchantCenterAccountLinks( int index) { if (merchantCenterAccountLinksBuilder_ == null) { return merchantCenterAccountLinks_.get(index); } else { return merchantCenterAccountLinksBuilder_.getMessage(index); } } /** * * * <pre> * The links. 
* </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public Builder setMerchantCenterAccountLinks( int index, com.google.cloud.retail.v2alpha.MerchantCenterAccountLink value) { if (merchantCenterAccountLinksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMerchantCenterAccountLinksIsMutable(); merchantCenterAccountLinks_.set(index, value); onChanged(); } else { merchantCenterAccountLinksBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public Builder setMerchantCenterAccountLinks( int index, com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.Builder builderForValue) { if (merchantCenterAccountLinksBuilder_ == null) { ensureMerchantCenterAccountLinksIsMutable(); merchantCenterAccountLinks_.set(index, builderForValue.build()); onChanged(); } else { merchantCenterAccountLinksBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public Builder addMerchantCenterAccountLinks( com.google.cloud.retail.v2alpha.MerchantCenterAccountLink value) { if (merchantCenterAccountLinksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMerchantCenterAccountLinksIsMutable(); merchantCenterAccountLinks_.add(value); onChanged(); } else { merchantCenterAccountLinksBuilder_.addMessage(value); } return this; } /** * * * <pre> * The links. 
* </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public Builder addMerchantCenterAccountLinks( int index, com.google.cloud.retail.v2alpha.MerchantCenterAccountLink value) { if (merchantCenterAccountLinksBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureMerchantCenterAccountLinksIsMutable(); merchantCenterAccountLinks_.add(index, value); onChanged(); } else { merchantCenterAccountLinksBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public Builder addMerchantCenterAccountLinks( com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.Builder builderForValue) { if (merchantCenterAccountLinksBuilder_ == null) { ensureMerchantCenterAccountLinksIsMutable(); merchantCenterAccountLinks_.add(builderForValue.build()); onChanged(); } else { merchantCenterAccountLinksBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public Builder addMerchantCenterAccountLinks( int index, com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.Builder builderForValue) { if (merchantCenterAccountLinksBuilder_ == null) { ensureMerchantCenterAccountLinksIsMutable(); merchantCenterAccountLinks_.add(index, builderForValue.build()); onChanged(); } else { merchantCenterAccountLinksBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public Builder addAllMerchantCenterAccountLinks( java.lang.Iterable<? 
extends com.google.cloud.retail.v2alpha.MerchantCenterAccountLink> values) { if (merchantCenterAccountLinksBuilder_ == null) { ensureMerchantCenterAccountLinksIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, merchantCenterAccountLinks_); onChanged(); } else { merchantCenterAccountLinksBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public Builder clearMerchantCenterAccountLinks() { if (merchantCenterAccountLinksBuilder_ == null) { merchantCenterAccountLinks_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { merchantCenterAccountLinksBuilder_.clear(); } return this; } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public Builder removeMerchantCenterAccountLinks(int index) { if (merchantCenterAccountLinksBuilder_ == null) { ensureMerchantCenterAccountLinksIsMutable(); merchantCenterAccountLinks_.remove(index); onChanged(); } else { merchantCenterAccountLinksBuilder_.remove(index); } return this; } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.Builder getMerchantCenterAccountLinksBuilder(int index) { return getMerchantCenterAccountLinksFieldBuilder().getBuilder(index); } /** * * * <pre> * The links. 
* </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public com.google.cloud.retail.v2alpha.MerchantCenterAccountLinkOrBuilder getMerchantCenterAccountLinksOrBuilder(int index) { if (merchantCenterAccountLinksBuilder_ == null) { return merchantCenterAccountLinks_.get(index); } else { return merchantCenterAccountLinksBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public java.util.List< ? extends com.google.cloud.retail.v2alpha.MerchantCenterAccountLinkOrBuilder> getMerchantCenterAccountLinksOrBuilderList() { if (merchantCenterAccountLinksBuilder_ != null) { return merchantCenterAccountLinksBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(merchantCenterAccountLinks_); } } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.Builder addMerchantCenterAccountLinksBuilder() { return getMerchantCenterAccountLinksFieldBuilder() .addBuilder( com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.getDefaultInstance()); } /** * * * <pre> * The links. * </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.Builder addMerchantCenterAccountLinksBuilder(int index) { return getMerchantCenterAccountLinksFieldBuilder() .addBuilder( index, com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.getDefaultInstance()); } /** * * * <pre> * The links. 
* </pre> * * <code> * repeated .google.cloud.retail.v2alpha.MerchantCenterAccountLink merchant_center_account_links = 1; * </code> */ public java.util.List<com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.Builder> getMerchantCenterAccountLinksBuilderList() { return getMerchantCenterAccountLinksFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.retail.v2alpha.MerchantCenterAccountLink, com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.Builder, com.google.cloud.retail.v2alpha.MerchantCenterAccountLinkOrBuilder> getMerchantCenterAccountLinksFieldBuilder() { if (merchantCenterAccountLinksBuilder_ == null) { merchantCenterAccountLinksBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.retail.v2alpha.MerchantCenterAccountLink, com.google.cloud.retail.v2alpha.MerchantCenterAccountLink.Builder, com.google.cloud.retail.v2alpha.MerchantCenterAccountLinkOrBuilder>( merchantCenterAccountLinks_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); merchantCenterAccountLinks_ = null; } return merchantCenterAccountLinksBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse) } // @@protoc_insertion_point(class_scope:google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse) private static final com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse(); } public static 
com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListMerchantCenterAccountLinksResponse> PARSER = new com.google.protobuf.AbstractParser<ListMerchantCenterAccountLinksResponse>() { @java.lang.Override public ListMerchantCenterAccountLinksResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListMerchantCenterAccountLinksResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListMerchantCenterAccountLinksResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.retail.v2alpha.ListMerchantCenterAccountLinksResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// ---- NOTE(review): the three lines below were raw dataset metadata (repo id,
// file size, file path) spliced between two concatenated generated files; they
// are not Java and are preserved here as comments so the text stays readable.
// repo: googleapis/google-cloud-java
// size: 35,901
// path: java-dialogflow-cx/proto-google-cloud-dialogflow-cx-v3beta1/src/main/java/com/google/cloud/dialogflow/cx/v3beta1/CompareVersionsResponse.java
// NOTE(review): machine-generated protobuf code (protoc output, Protobuf Java
// 3.25.8 per its own header) for
// com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse.
// This span is the head of that message class: license header, class
// declaration, the fields `base_version_content_json` = 1,
// `target_version_content_json` = 2 and `compare_time` = 3, serialization
// (writeTo/getSerializedSize), equals/hashCode, the parseFrom family, and the
// start of its Builder. It is cut off mid-javadoc at the end of the span.
// Generated files must not be hand-edited; regenerate from
// google/cloud/dialogflow/cx/v3beta1/version.proto instead.
// NOTE(review): the text reached us with the generator's newlines collapsed,
// so `//` end-of-line comments below swallow trailing tokens on the same
// physical line — restore the original line breaks (re-run protoc) before
// attempting to compile this file.
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/cx/v3beta1/version.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.cx.v3beta1; /** * * * <pre> * The response message for * [Versions.CompareVersions][google.cloud.dialogflow.cx.v3beta1.Versions.CompareVersions]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse} */ public final class CompareVersionsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse) CompareVersionsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use CompareVersionsResponse.newBuilder() to construct. 
private CompareVersionsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CompareVersionsResponse() { baseVersionContentJson_ = ""; targetVersionContentJson_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CompareVersionsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3beta1.VersionProto .internal_static_google_cloud_dialogflow_cx_v3beta1_CompareVersionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3beta1.VersionProto .internal_static_google_cloud_dialogflow_cx_v3beta1_CompareVersionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse.class, com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse.Builder.class); } private int bitField0_; public static final int BASE_VERSION_CONTENT_JSON_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object baseVersionContentJson_ = ""; /** * * * <pre> * JSON representation of the base version content. * </pre> * * <code>string base_version_content_json = 1;</code> * * @return The baseVersionContentJson. */ @java.lang.Override public java.lang.String getBaseVersionContentJson() { java.lang.Object ref = baseVersionContentJson_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); baseVersionContentJson_ = s; return s; } } /** * * * <pre> * JSON representation of the base version content. * </pre> * * <code>string base_version_content_json = 1;</code> * * @return The bytes for baseVersionContentJson. 
*/ @java.lang.Override public com.google.protobuf.ByteString getBaseVersionContentJsonBytes() { java.lang.Object ref = baseVersionContentJson_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); baseVersionContentJson_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int TARGET_VERSION_CONTENT_JSON_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object targetVersionContentJson_ = ""; /** * * * <pre> * JSON representation of the target version content. * </pre> * * <code>string target_version_content_json = 2;</code> * * @return The targetVersionContentJson. */ @java.lang.Override public java.lang.String getTargetVersionContentJson() { java.lang.Object ref = targetVersionContentJson_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); targetVersionContentJson_ = s; return s; } } /** * * * <pre> * JSON representation of the target version content. * </pre> * * <code>string target_version_content_json = 2;</code> * * @return The bytes for targetVersionContentJson. */ @java.lang.Override public com.google.protobuf.ByteString getTargetVersionContentJsonBytes() { java.lang.Object ref = targetVersionContentJson_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); targetVersionContentJson_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int COMPARE_TIME_FIELD_NUMBER = 3; private com.google.protobuf.Timestamp compareTime_; /** * * * <pre> * The timestamp when the two version compares. * </pre> * * <code>.google.protobuf.Timestamp compare_time = 3;</code> * * @return Whether the compareTime field is set. 
*/ @java.lang.Override public boolean hasCompareTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The timestamp when the two version compares. * </pre> * * <code>.google.protobuf.Timestamp compare_time = 3;</code> * * @return The compareTime. */ @java.lang.Override public com.google.protobuf.Timestamp getCompareTime() { return compareTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : compareTime_; } /** * * * <pre> * The timestamp when the two version compares. * </pre> * * <code>.google.protobuf.Timestamp compare_time = 3;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getCompareTimeOrBuilder() { return compareTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : compareTime_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(baseVersionContentJson_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, baseVersionContentJson_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetVersionContentJson_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, targetVersionContentJson_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(3, getCompareTime()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(baseVersionContentJson_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, baseVersionContentJson_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(targetVersionContentJson_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, targetVersionContentJson_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getCompareTime()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse)) { return super.equals(obj); } com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse other = (com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse) obj; if (!getBaseVersionContentJson().equals(other.getBaseVersionContentJson())) return false; if (!getTargetVersionContentJson().equals(other.getTargetVersionContentJson())) return false; if (hasCompareTime() != other.hasCompareTime()) return false; if (hasCompareTime()) { if (!getCompareTime().equals(other.getCompareTime())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + BASE_VERSION_CONTENT_JSON_FIELD_NUMBER; hash = (53 * hash) + getBaseVersionContentJson().hashCode(); hash = (37 * hash) + TARGET_VERSION_CONTENT_JSON_FIELD_NUMBER; hash = (53 * hash) + getTargetVersionContentJson().hashCode(); if (hasCompareTime()) { hash = (37 * hash) + COMPARE_TIME_FIELD_NUMBER; hash = (53 * hash) + getCompareTime().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The response message for * [Versions.CompareVersions][google.cloud.dialogflow.cx.v3beta1.Versions.CompareVersions]. 
* </pre> * * Protobuf type {@code google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse) com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.cx.v3beta1.VersionProto .internal_static_google_cloud_dialogflow_cx_v3beta1_CompareVersionsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.cx.v3beta1.VersionProto .internal_static_google_cloud_dialogflow_cx_v3beta1_CompareVersionsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse.class, com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse.Builder.class); } // Construct using com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getCompareTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; baseVersionContentJson_ = ""; targetVersionContentJson_ = ""; compareTime_ = null; if (compareTimeBuilder_ != null) { compareTimeBuilder_.dispose(); compareTimeBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.cx.v3beta1.VersionProto 
.internal_static_google_cloud_dialogflow_cx_v3beta1_CompareVersionsResponse_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse getDefaultInstanceForType() { return com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse build() { com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse buildPartial() { com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse result = new com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.baseVersionContentJson_ = baseVersionContentJson_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.targetVersionContentJson_ = targetVersionContentJson_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000004) != 0)) { result.compareTime_ = compareTimeBuilder_ == null ? 
compareTime_ : compareTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse) { return mergeFrom((com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse other) { if (other == com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse.getDefaultInstance()) return this; if (!other.getBaseVersionContentJson().isEmpty()) { baseVersionContentJson_ = other.baseVersionContentJson_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getTargetVersionContentJson().isEmpty()) { targetVersionContentJson_ = other.targetVersionContentJson_; bitField0_ |= 0x00000002; onChanged(); } if (other.hasCompareTime()) { mergeCompareTime(other.getCompareTime()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override 
public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { baseVersionContentJson_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { targetVersionContentJson_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getCompareTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object baseVersionContentJson_ = ""; /** * * * <pre> * JSON representation of the base version content. * </pre> * * <code>string base_version_content_json = 1;</code> * * @return The baseVersionContentJson. */ public java.lang.String getBaseVersionContentJson() { java.lang.Object ref = baseVersionContentJson_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); baseVersionContentJson_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * JSON representation of the base version content. * </pre> * * <code>string base_version_content_json = 1;</code> * * @return The bytes for baseVersionContentJson. 
*/ public com.google.protobuf.ByteString getBaseVersionContentJsonBytes() { java.lang.Object ref = baseVersionContentJson_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); baseVersionContentJson_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * JSON representation of the base version content. * </pre> * * <code>string base_version_content_json = 1;</code> * * @param value The baseVersionContentJson to set. * @return This builder for chaining. */ public Builder setBaseVersionContentJson(java.lang.String value) { if (value == null) { throw new NullPointerException(); } baseVersionContentJson_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * JSON representation of the base version content. * </pre> * * <code>string base_version_content_json = 1;</code> * * @return This builder for chaining. */ public Builder clearBaseVersionContentJson() { baseVersionContentJson_ = getDefaultInstance().getBaseVersionContentJson(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * JSON representation of the base version content. * </pre> * * <code>string base_version_content_json = 1;</code> * * @param value The bytes for baseVersionContentJson to set. * @return This builder for chaining. */ public Builder setBaseVersionContentJsonBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); baseVersionContentJson_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object targetVersionContentJson_ = ""; /** * * * <pre> * JSON representation of the target version content. * </pre> * * <code>string target_version_content_json = 2;</code> * * @return The targetVersionContentJson. 
*/ public java.lang.String getTargetVersionContentJson() { java.lang.Object ref = targetVersionContentJson_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); targetVersionContentJson_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * JSON representation of the target version content. * </pre> * * <code>string target_version_content_json = 2;</code> * * @return The bytes for targetVersionContentJson. */ public com.google.protobuf.ByteString getTargetVersionContentJsonBytes() { java.lang.Object ref = targetVersionContentJson_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); targetVersionContentJson_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * JSON representation of the target version content. * </pre> * * <code>string target_version_content_json = 2;</code> * * @param value The targetVersionContentJson to set. * @return This builder for chaining. */ public Builder setTargetVersionContentJson(java.lang.String value) { if (value == null) { throw new NullPointerException(); } targetVersionContentJson_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * JSON representation of the target version content. * </pre> * * <code>string target_version_content_json = 2;</code> * * @return This builder for chaining. */ public Builder clearTargetVersionContentJson() { targetVersionContentJson_ = getDefaultInstance().getTargetVersionContentJson(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * JSON representation of the target version content. * </pre> * * <code>string target_version_content_json = 2;</code> * * @param value The bytes for targetVersionContentJson to set. * @return This builder for chaining. 
*/ public Builder setTargetVersionContentJsonBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); targetVersionContentJson_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private com.google.protobuf.Timestamp compareTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> compareTimeBuilder_; /** * * * <pre> * The timestamp when the two version compares. * </pre> * * <code>.google.protobuf.Timestamp compare_time = 3;</code> * * @return Whether the compareTime field is set. */ public boolean hasCompareTime() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * The timestamp when the two version compares. * </pre> * * <code>.google.protobuf.Timestamp compare_time = 3;</code> * * @return The compareTime. */ public com.google.protobuf.Timestamp getCompareTime() { if (compareTimeBuilder_ == null) { return compareTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : compareTime_; } else { return compareTimeBuilder_.getMessage(); } } /** * * * <pre> * The timestamp when the two version compares. * </pre> * * <code>.google.protobuf.Timestamp compare_time = 3;</code> */ public Builder setCompareTime(com.google.protobuf.Timestamp value) { if (compareTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } compareTime_ = value; } else { compareTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The timestamp when the two version compares. 
* </pre> * * <code>.google.protobuf.Timestamp compare_time = 3;</code> */ public Builder setCompareTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (compareTimeBuilder_ == null) { compareTime_ = builderForValue.build(); } else { compareTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The timestamp when the two version compares. * </pre> * * <code>.google.protobuf.Timestamp compare_time = 3;</code> */ public Builder mergeCompareTime(com.google.protobuf.Timestamp value) { if (compareTimeBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && compareTime_ != null && compareTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getCompareTimeBuilder().mergeFrom(value); } else { compareTime_ = value; } } else { compareTimeBuilder_.mergeFrom(value); } if (compareTime_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * The timestamp when the two version compares. * </pre> * * <code>.google.protobuf.Timestamp compare_time = 3;</code> */ public Builder clearCompareTime() { bitField0_ = (bitField0_ & ~0x00000004); compareTime_ = null; if (compareTimeBuilder_ != null) { compareTimeBuilder_.dispose(); compareTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The timestamp when the two version compares. * </pre> * * <code>.google.protobuf.Timestamp compare_time = 3;</code> */ public com.google.protobuf.Timestamp.Builder getCompareTimeBuilder() { bitField0_ |= 0x00000004; onChanged(); return getCompareTimeFieldBuilder().getBuilder(); } /** * * * <pre> * The timestamp when the two version compares. * </pre> * * <code>.google.protobuf.Timestamp compare_time = 3;</code> */ public com.google.protobuf.TimestampOrBuilder getCompareTimeOrBuilder() { if (compareTimeBuilder_ != null) { return compareTimeBuilder_.getMessageOrBuilder(); } else { return compareTime_ == null ? 
com.google.protobuf.Timestamp.getDefaultInstance() : compareTime_; } } /** * * * <pre> * The timestamp when the two version compares. * </pre> * * <code>.google.protobuf.Timestamp compare_time = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getCompareTimeFieldBuilder() { if (compareTimeBuilder_ == null) { compareTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getCompareTime(), getParentForChildren(), isClean()); compareTime_ = null; } return compareTimeBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse) private static final com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse(); } public static com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CompareVersionsResponse> PARSER = new com.google.protobuf.AbstractParser<CompareVersionsResponse>() { @java.lang.Override public CompareVersionsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = 
newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CompareVersionsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CompareVersionsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.cx.v3beta1.CompareVersionsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hbase
36,246
hbase-server/src/main/java/org/apache/hadoop/hbase/security/access/SnapshotScannerHDFSAclController.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hbase.security.access; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashSet; import java.util.List; import java.util.Optional; import java.util.Set; import java.util.stream.Collectors; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.Path; import org.apache.hadoop.hbase.Cell; import org.apache.hadoop.hbase.CellUtil; import org.apache.hadoop.hbase.HBaseInterfaceAudience; import org.apache.hadoop.hbase.NamespaceDescriptor; import org.apache.hadoop.hbase.TableName; import org.apache.hadoop.hbase.TableNotFoundException; import org.apache.hadoop.hbase.client.Admin; import org.apache.hadoop.hbase.client.ColumnFamilyDescriptorBuilder; import org.apache.hadoop.hbase.client.Connection; import org.apache.hadoop.hbase.client.Delete; import org.apache.hadoop.hbase.client.Get; import org.apache.hadoop.hbase.client.Put; import org.apache.hadoop.hbase.client.RegionInfo; import org.apache.hadoop.hbase.client.Result; import org.apache.hadoop.hbase.client.ResultScanner; import org.apache.hadoop.hbase.client.Scan; import org.apache.hadoop.hbase.client.SnapshotDescription; import 
org.apache.hadoop.hbase.client.Table; import org.apache.hadoop.hbase.client.TableDescriptor; import org.apache.hadoop.hbase.client.TableDescriptorBuilder; import org.apache.hadoop.hbase.coprocessor.CoreCoprocessor; import org.apache.hadoop.hbase.coprocessor.HasMasterServices; import org.apache.hadoop.hbase.coprocessor.MasterCoprocessor; import org.apache.hadoop.hbase.coprocessor.MasterCoprocessorEnvironment; import org.apache.hadoop.hbase.coprocessor.MasterObserver; import org.apache.hadoop.hbase.coprocessor.ObserverContext; import org.apache.hadoop.hbase.master.MasterServices; import org.apache.hadoop.hbase.security.User; import org.apache.hadoop.hbase.security.UserProvider; import org.apache.hadoop.hbase.security.access.SnapshotScannerHDFSAclHelper.PathHelper; import org.apache.hadoop.hbase.util.Bytes; import org.apache.hadoop.hbase.util.Pair; import org.apache.yetus.audience.InterfaceAudience; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import org.apache.hbase.thirdparty.com.google.common.collect.Sets; /** * Set HDFS ACLs to hFiles to make HBase granted users have permission to scan snapshot * <p> * To use this feature, please mask sure HDFS config: * <ul> * <li>dfs.namenode.acls.enabled = true</li> * <li>fs.permissions.umask-mode = 027 (or smaller umask than 027)</li> * </ul> * </p> * <p> * The implementation of this feature is as followings: * <ul> * <li>For common directories such as 'data' and 'archive', set other permission to '--x' to make * everyone have the permission to access the directory.</li> * <li>For namespace or table directories such as 'data/ns/table', 'archive/ns/table' and * '.hbase-snapshot/snapshotName', set user 'r-x' access acl and 'r-x' default acl when following * operations happen: * <ul> * <li>grant user with global, namespace or table permission;</li> * <li>revoke user from global, namespace or table;</li> * <li>snapshot table;</li> * <li>truncate table;</li> * </ul> * </li> * <li>Note: Because snapshots are at table 
level, so this feature just considers users with global, * namespace or table permissions, ignores users with table CF or cell permissions.</li> * </ul> * </p> */ @CoreCoprocessor @InterfaceAudience.LimitedPrivate(HBaseInterfaceAudience.CONFIG) public class SnapshotScannerHDFSAclController implements MasterCoprocessor, MasterObserver { private static final Logger LOG = LoggerFactory.getLogger(SnapshotScannerHDFSAclController.class); private SnapshotScannerHDFSAclHelper hdfsAclHelper = null; private PathHelper pathHelper = null; private MasterServices masterServices = null; private volatile boolean initialized = false; private volatile boolean aclTableInitialized = false; /** Provider for mapping principal names to Users */ private UserProvider userProvider; @Override public Optional<MasterObserver> getMasterObserver() { return Optional.of(this); } @Override public void preMasterInitialization(ObserverContext<MasterCoprocessorEnvironment> c) throws IOException { if ( c.getEnvironment().getConfiguration() .getBoolean(SnapshotScannerHDFSAclHelper.ACL_SYNC_TO_HDFS_ENABLE, false) ) { MasterCoprocessorEnvironment mEnv = c.getEnvironment(); if (!(mEnv instanceof HasMasterServices)) { throw new IOException("Does not implement HMasterServices"); } masterServices = ((HasMasterServices) mEnv).getMasterServices(); hdfsAclHelper = new SnapshotScannerHDFSAclHelper(masterServices.getConfiguration(), masterServices.getConnection()); pathHelper = hdfsAclHelper.getPathHelper(); hdfsAclHelper.setCommonDirectoryPermission(); initialized = true; userProvider = UserProvider.instantiate(c.getEnvironment().getConfiguration()); } else { LOG.warn("Try to initialize the coprocessor SnapshotScannerHDFSAclController but failure " + "because the config " + SnapshotScannerHDFSAclHelper.ACL_SYNC_TO_HDFS_ENABLE + " is false."); } } @Override public void postStartMaster(ObserverContext<MasterCoprocessorEnvironment> c) throws IOException { if (!initialized) { return; } try (Admin admin = 
c.getEnvironment().getConnection().getAdmin()) { if (admin.tableExists(PermissionStorage.ACL_TABLE_NAME)) { // Check if acl table has 'm' CF, if not, add 'm' CF TableDescriptor tableDescriptor = admin.getDescriptor(PermissionStorage.ACL_TABLE_NAME); boolean containHdfsAclFamily = Arrays.stream(tableDescriptor.getColumnFamilies()).anyMatch( family -> Bytes.equals(family.getName(), SnapshotScannerHDFSAclStorage.HDFS_ACL_FAMILY)); if (!containHdfsAclFamily) { TableDescriptorBuilder builder = TableDescriptorBuilder.newBuilder(tableDescriptor) .setColumnFamily(ColumnFamilyDescriptorBuilder .newBuilder(SnapshotScannerHDFSAclStorage.HDFS_ACL_FAMILY).build()); admin.modifyTable(builder.build()); } aclTableInitialized = true; } else { throw new TableNotFoundException( "Table " + PermissionStorage.ACL_TABLE_NAME + " is not created yet. Please check if " + getClass().getName() + " is configured after " + AccessController.class.getName()); } } } @Override public void preStopMaster(ObserverContext<MasterCoprocessorEnvironment> c) { if (initialized) { hdfsAclHelper.close(); } } @Override public void postCompletedCreateTableAction(ObserverContext<MasterCoprocessorEnvironment> c, TableDescriptor desc, RegionInfo[] regions) throws IOException { if (needHandleTableHdfsAcl(desc, "createTable " + desc.getTableName())) { TableName tableName = desc.getTableName(); // 1. Create table directories to make HDFS acls can be inherited hdfsAclHelper.createTableDirectories(tableName); // 2. Add table owner HDFS acls String owner = getActiveUser(c).getShortName(); hdfsAclHelper.addTableAcl(tableName, Sets.newHashSet(owner), "create"); // 3. 
Record table owner permission is synced to HDFS in acl table SnapshotScannerHDFSAclStorage.addUserTableHdfsAcl(c.getEnvironment().getConnection(), owner, tableName); } } @Override public void postCreateNamespace(ObserverContext<MasterCoprocessorEnvironment> c, NamespaceDescriptor ns) throws IOException { if (checkInitialized("createNamespace " + ns.getName())) { // Create namespace directories to make HDFS acls can be inherited List<Path> paths = hdfsAclHelper.getNamespaceRootPaths(ns.getName()); for (Path path : paths) { hdfsAclHelper.createDirIfNotExist(path); } } } @Override public void postCompletedSnapshotAction(ObserverContext<MasterCoprocessorEnvironment> c, SnapshotDescription snapshot, TableDescriptor tableDescriptor) throws IOException { if (needHandleTableHdfsAcl(tableDescriptor, "snapshot " + snapshot.getName())) { // Add HDFS acls of users with table read permission to snapshot files hdfsAclHelper.snapshotAcl(snapshot); } } @Override public void postCompletedTruncateTableAction(ObserverContext<MasterCoprocessorEnvironment> c, TableName tableName) throws IOException { if (needHandleTableHdfsAcl(tableName, "truncateTable " + tableName)) { // 1. create tmp table directories hdfsAclHelper.createTableDirectories(tableName); // 2. 
Since the table directories is recreated, so add HDFS acls again Set<String> users = hdfsAclHelper.getUsersWithTableReadAction(tableName, false, false); hdfsAclHelper.addTableAcl(tableName, users, "truncate"); } } @Override public void postCompletedDeleteTableAction(ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName) throws IOException { if (!tableName.isSystemTable() && checkInitialized("deleteTable " + tableName)) { /* * Remove table user access HDFS acl from namespace directory if the user has no permissions * of global, ns of the table or other tables of the ns, eg: Bob has 'ns1:t1' read permission, * when delete 'ns1:t1', if Bob has global read permission, '@ns1' read permission or * 'ns1:other_tables' read permission, then skip remove Bob access acl in ns1Dirs, otherwise, * remove Bob access acl. */ try (Table aclTable = ctx.getEnvironment().getConnection().getTable(PermissionStorage.ACL_TABLE_NAME)) { Set<String> users = SnapshotScannerHDFSAclStorage.getTableUsers(aclTable, tableName); if (users.size() > 0) { // 1. Remove table archive directory default ACLs hdfsAclHelper.removeTableDefaultAcl(tableName, users); // 2. Delete table owner permission is synced to HDFS in acl table SnapshotScannerHDFSAclStorage.deleteTableHdfsAcl(aclTable, tableName); // 3. Remove namespace access acls Set<String> removeUsers = filterUsersToRemoveNsAccessAcl(aclTable, tableName, users); if (removeUsers.size() > 0) { hdfsAclHelper.removeNamespaceAccessAcl(tableName, removeUsers, "delete"); } } } } } @Override public void postModifyTable(ObserverContext<MasterCoprocessorEnvironment> ctx, TableName tableName, TableDescriptor oldDescriptor, TableDescriptor currentDescriptor) throws IOException { try (Table aclTable = ctx.getEnvironment().getConnection().getTable(PermissionStorage.ACL_TABLE_NAME)) { if ( needHandleTableHdfsAcl(currentDescriptor, "modifyTable " + tableName) && !hdfsAclHelper.isAclSyncToHdfsEnabled(oldDescriptor) ) { // 1. 
Create table directories used for acl inherited hdfsAclHelper.createTableDirectories(tableName); // 2. Add table users HDFS acls Set<String> tableUsers = hdfsAclHelper.getUsersWithTableReadAction(tableName, false, false); Set<String> users = hdfsAclHelper.getUsersWithNamespaceReadAction(tableName.getNamespaceAsString(), true); users.addAll(tableUsers); hdfsAclHelper.addTableAcl(tableName, users, "modify"); // 3. Record table user acls are synced to HDFS in acl table SnapshotScannerHDFSAclStorage.addUserTableHdfsAcl(ctx.getEnvironment().getConnection(), tableUsers, tableName); } else if ( needHandleTableHdfsAcl(oldDescriptor, "modifyTable " + tableName) && !hdfsAclHelper.isAclSyncToHdfsEnabled(currentDescriptor) ) { // 1. Remove empty table directories List<Path> tableRootPaths = hdfsAclHelper.getTableRootPaths(tableName, false); for (Path path : tableRootPaths) { hdfsAclHelper.deleteEmptyDir(path); } // 2. Remove all table HDFS acls Set<String> tableUsers = hdfsAclHelper.getUsersWithTableReadAction(tableName, false, false); Set<String> users = hdfsAclHelper.getUsersWithNamespaceReadAction(tableName.getNamespaceAsString(), true); users.addAll(tableUsers); hdfsAclHelper.removeTableAcl(tableName, users); // 3. Remove namespace access HDFS acls for users who only own permission for this table hdfsAclHelper.removeNamespaceAccessAcl(tableName, filterUsersToRemoveNsAccessAcl(aclTable, tableName, tableUsers), "modify"); // 4. Record table user acl is not synced to HDFS SnapshotScannerHDFSAclStorage.deleteUserTableHdfsAcl(ctx.getEnvironment().getConnection(), tableUsers, tableName); } } } @Override public void postDeleteNamespace(ObserverContext<MasterCoprocessorEnvironment> ctx, String namespace) throws IOException { if (checkInitialized("deleteNamespace " + namespace)) { try (Table aclTable = ctx.getEnvironment().getConnection().getTable(PermissionStorage.ACL_TABLE_NAME)) { // 1. 
Delete namespace archive dir default ACLs Set<String> users = SnapshotScannerHDFSAclStorage.getEntryUsers(aclTable, PermissionStorage.toNamespaceEntry(Bytes.toBytes(namespace))); hdfsAclHelper.removeNamespaceDefaultAcl(namespace, users); // 2. Record namespace user acl is not synced to HDFS SnapshotScannerHDFSAclStorage.deleteNamespaceHdfsAcl(ctx.getEnvironment().getConnection(), namespace); // 3. Delete tmp namespace directory /** * Delete namespace tmp directory because it's created by this coprocessor when namespace is * created to make namespace default acl can be inherited by tables. The namespace data * directory is deleted by DeleteNamespaceProcedure, the namespace archive directory is * deleted by HFileCleaner. */ hdfsAclHelper.deleteEmptyDir(pathHelper.getTmpNsDir(namespace)); } } } @Override public void postGrant(ObserverContext<MasterCoprocessorEnvironment> c, UserPermission userPermission, boolean mergeExistingPermissions) throws IOException { if ( !checkInitialized( "grant " + userPermission + ", merge existing permissions " + mergeExistingPermissions) ) { return; } try (Table aclTable = c.getEnvironment().getConnection().getTable(PermissionStorage.ACL_TABLE_NAME)) { Configuration conf = c.getEnvironment().getConfiguration(); String userName = userPermission.getUser(); switch (userPermission.getAccessScope()) { case GLOBAL: UserPermission perm = getUserGlobalPermission(conf, userName); if (perm != null && hdfsAclHelper.containReadAction(perm)) { if (!isHdfsAclSet(aclTable, userName)) { // 1. Get namespaces and tables which global user acls are already synced Pair<Set<String>, Set<TableName>> skipNamespaceAndTables = SnapshotScannerHDFSAclStorage.getUserNamespaceAndTable(aclTable, userName); Set<String> skipNamespaces = skipNamespaceAndTables.getFirst(); Set<TableName> skipTables = skipNamespaceAndTables.getSecond().stream() .filter(t -> !skipNamespaces.contains(t.getNamespaceAsString())) .collect(Collectors.toSet()); // 2. 
Add HDFS acl(skip namespaces and tables directories whose acl is set) hdfsAclHelper.grantAcl(userPermission, skipNamespaces, skipTables); // 3. Record global acl is sync to HDFS SnapshotScannerHDFSAclStorage.addUserGlobalHdfsAcl(aclTable, userName); } } else { // The merged user permission doesn't contain READ, so remove user global HDFS acls if // it's set removeUserGlobalHdfsAcl(aclTable, userName, userPermission); } break; case NAMESPACE: String namespace = ((NamespacePermission) userPermission.getPermission()).getNamespace(); UserPermission nsPerm = getUserNamespacePermission(conf, userName, namespace); if (nsPerm != null && hdfsAclHelper.containReadAction(nsPerm)) { if (!isHdfsAclSet(aclTable, userName, namespace)) { // 1. Get tables which namespace user acls are already synced Set<TableName> skipTables = SnapshotScannerHDFSAclStorage .getUserNamespaceAndTable(aclTable, userName).getSecond(); // 2. Add HDFS acl(skip tables directories whose acl is set) hdfsAclHelper.grantAcl(userPermission, new HashSet<>(0), skipTables); } // 3. Record namespace acl is synced to HDFS SnapshotScannerHDFSAclStorage.addUserNamespaceHdfsAcl(aclTable, userName, namespace); } else { // The merged user permission doesn't contain READ, so remove user namespace HDFS acls // if it's set removeUserNamespaceHdfsAcl(aclTable, userName, namespace, userPermission); } break; case TABLE: TablePermission tablePerm = (TablePermission) userPermission.getPermission(); if (needHandleTableHdfsAcl(tablePerm)) { TableName tableName = tablePerm.getTableName(); UserPermission tPerm = getUserTablePermission(conf, userName, tableName); if (tPerm != null && hdfsAclHelper.containReadAction(tPerm)) { if (!isHdfsAclSet(aclTable, userName, tableName)) { // 1. create table dirs hdfsAclHelper.createTableDirectories(tableName); // 2. Add HDFS acl hdfsAclHelper.grantAcl(userPermission, new HashSet<>(0), new HashSet<>(0)); } // 2. 
Record table acl is synced to HDFS SnapshotScannerHDFSAclStorage.addUserTableHdfsAcl(aclTable, userName, tableName); } else { // The merged user permission doesn't contain READ, so remove user table HDFS acls if // it's set removeUserTableHdfsAcl(aclTable, userName, tableName, userPermission); } } break; default: throw new IllegalArgumentException( "Illegal user permission scope " + userPermission.getAccessScope()); } } } @Override public void postRevoke(ObserverContext<MasterCoprocessorEnvironment> c, UserPermission userPermission) throws IOException { if (checkInitialized("revoke " + userPermission)) { try (Table aclTable = c.getEnvironment().getConnection().getTable(PermissionStorage.ACL_TABLE_NAME)) { String userName = userPermission.getUser(); Configuration conf = c.getEnvironment().getConfiguration(); switch (userPermission.getAccessScope()) { case GLOBAL: UserPermission userGlobalPerm = getUserGlobalPermission(conf, userName); if (userGlobalPerm == null || !hdfsAclHelper.containReadAction(userGlobalPerm)) { removeUserGlobalHdfsAcl(aclTable, userName, userPermission); } break; case NAMESPACE: NamespacePermission nsPerm = (NamespacePermission) userPermission.getPermission(); UserPermission userNsPerm = getUserNamespacePermission(conf, userName, nsPerm.getNamespace()); if (userNsPerm == null || !hdfsAclHelper.containReadAction(userNsPerm)) { removeUserNamespaceHdfsAcl(aclTable, userName, nsPerm.getNamespace(), userPermission); } break; case TABLE: TablePermission tPerm = (TablePermission) userPermission.getPermission(); if (needHandleTableHdfsAcl(tPerm)) { TableName tableName = tPerm.getTableName(); UserPermission userTablePerm = getUserTablePermission(conf, userName, tableName); if (userTablePerm == null || !hdfsAclHelper.containReadAction(userTablePerm)) { removeUserTableHdfsAcl(aclTable, userName, tableName, userPermission); } } break; default: throw new IllegalArgumentException( "Illegal user permission scope " + userPermission.getAccessScope()); } } } } 
/**
 * Removes the user's global HDFS acls after a revoke/grant left the user without global READ.
 * Directories belonging to namespaces or tables for which the user still has their own synced
 * acl record are skipped, so those acls survive the global removal.
 * @param aclTable the HBase acl table
 * @param userName the user whose global acls are removed
 * @param userPermission the permission whose HDFS acls are revoked
 * @throws IOException if reading/updating the acl table or HDFS fails
 */
private void removeUserGlobalHdfsAcl(Table aclTable, String userName,
  UserPermission userPermission) throws IOException {
  if (SnapshotScannerHDFSAclStorage.hasUserGlobalHdfsAcl(aclTable, userName)) {
    // 1. Get namespaces and tables which global user acls are already synced
    Pair<Set<String>, Set<TableName>> namespaceAndTable =
      SnapshotScannerHDFSAclStorage.getUserNamespaceAndTable(aclTable, userName);
    Set<String> skipNamespaces = namespaceAndTable.getFirst();
    // A table is only skipped individually when its namespace is not already skipped;
    // skipping the namespace directory covers its tables.
    Set<TableName> skipTables = namespaceAndTable.getSecond().stream()
      .filter(t -> !skipNamespaces.contains(t.getNamespaceAsString()))
      .collect(Collectors.toSet());
    // 2. Remove user HDFS acls(skip namespaces and tables directories
    // whose acl must be reversed)
    hdfsAclHelper.revokeAcl(userPermission, skipNamespaces, skipTables);
    // 3. Remove global user acl is synced to HDFS in acl table
    SnapshotScannerHDFSAclStorage.deleteUserGlobalHdfsAcl(aclTable, userName);
  }
}

/**
 * Removes the user's namespace-level HDFS acls, unless the user still holds a synced global
 * acl (in which case only the bookkeeping record is deleted, the HDFS acls stay in place).
 * @param aclTable the HBase acl table
 * @param userName the user whose namespace acls are removed
 * @param namespace the namespace whose acls are removed
 * @param userPermission the permission whose HDFS acls are revoked
 * @throws IOException if reading/updating the acl table or HDFS fails
 */
private void removeUserNamespaceHdfsAcl(Table aclTable, String userName, String namespace,
  UserPermission userPermission) throws IOException {
  if (SnapshotScannerHDFSAclStorage.hasUserNamespaceHdfsAcl(aclTable, userName, namespace)) {
    if (!SnapshotScannerHDFSAclStorage.hasUserGlobalHdfsAcl(aclTable, userName)) {
      // 1. Get tables whose namespace user acls are already synced
      Set<TableName> skipTables =
        SnapshotScannerHDFSAclStorage.getUserNamespaceAndTable(aclTable, userName).getSecond();
      // 2. Remove user HDFS acls(skip tables directories whose acl must be reversed)
      hdfsAclHelper.revokeAcl(userPermission, new HashSet<>(), skipTables);
    }
    // 3. Remove namespace user acl is synced to HDFS in acl table
    SnapshotScannerHDFSAclStorage.deleteUserNamespaceHdfsAcl(aclTable, userName, namespace);
  }
}

/**
 * Removes the user's table-level HDFS acls, unless the user still holds a synced global or
 * namespace acl covering the table (then only the bookkeeping record is deleted).
 * @param aclTable the HBase acl table
 * @param userName the user whose table acls are removed
 * @param tableName the table whose acls are removed
 * @param userPermission the permission whose HDFS acls are revoked
 * @throws IOException if reading/updating the acl table or HDFS fails
 */
private void removeUserTableHdfsAcl(Table aclTable, String userName, TableName tableName,
  UserPermission userPermission) throws IOException {
  if (SnapshotScannerHDFSAclStorage.hasUserTableHdfsAcl(aclTable, userName, tableName)) {
    if (
      !SnapshotScannerHDFSAclStorage.hasUserGlobalHdfsAcl(aclTable, userName)
        && !SnapshotScannerHDFSAclStorage.hasUserNamespaceHdfsAcl(aclTable, userName,
          tableName.getNamespaceAsString())
    ) {
      // 1. Remove table acls
      hdfsAclHelper.revokeAcl(userPermission, new HashSet<>(0), new HashSet<>(0));
    }
    // 2. Remove table user acl is synced to HDFS in acl table
    SnapshotScannerHDFSAclStorage.deleteUserTableHdfsAcl(aclTable, userName, tableName);
  }
}

/**
 * Fetches the user's merged global permission from permission storage, or null if none.
 */
private UserPermission getUserGlobalPermission(Configuration conf, String userName)
  throws IOException {
  List<UserPermission> permissions = PermissionStorage.getUserPermissions(conf,
    PermissionStorage.ACL_GLOBAL_NAME, null, null, userName, true);
  return permissions.size() > 0 ? permissions.get(0) : null;
}

/**
 * Fetches the user's merged permission on the given namespace, or null if none.
 */
private UserPermission getUserNamespacePermission(Configuration conf, String userName,
  String namespace) throws IOException {
  List<UserPermission> permissions =
    PermissionStorage.getUserNamespacePermissions(conf, namespace, userName, true);
  return permissions.size() > 0 ? permissions.get(0) : null;
}

/**
 * Fetches the user's merged table-scope permission on the given table, or null if none.
 * Family/qualifier-scoped permissions are filtered out: only whole-table permissions are
 * relevant for HDFS acl sync.
 */
private UserPermission getUserTablePermission(Configuration conf, String userName,
  TableName tableName) throws IOException {
  List<UserPermission> permissions = PermissionStorage
    .getUserTablePermissions(conf, tableName, null, null, userName, true).stream()
    .filter(userPermission -> hdfsAclHelper
      .isNotFamilyOrQualifierPermission((TablePermission) userPermission.getPermission()))
    .collect(Collectors.toList());
  return permissions.size() > 0 ? permissions.get(0) : null;
}

// Convenience overload: global scope only.
private boolean isHdfsAclSet(Table aclTable, String userName) throws IOException {
  return isHdfsAclSet(aclTable, userName, null, null);
}

// Convenience overload: global or the given namespace.
private boolean isHdfsAclSet(Table aclTable, String userName, String namespace)
  throws IOException {
  return isHdfsAclSet(aclTable, userName, namespace, null);
}

// Convenience overload: global, the table's namespace, or the table itself.
private boolean isHdfsAclSet(Table aclTable, String userName, TableName tableName)
  throws IOException {
  return isHdfsAclSet(aclTable, userName, null, tableName);
}

/**
 * Check if user global/namespace/table HDFS acls is already set. Any broader scope that is
 * synced (e.g. a global acl when asking about a table) also counts as "set".
 */
private boolean isHdfsAclSet(Table aclTable, String userName, String namespace,
  TableName tableName) throws IOException {
  boolean isSet = SnapshotScannerHDFSAclStorage.hasUserGlobalHdfsAcl(aclTable, userName);
  if (namespace != null) {
    isSet = isSet
      || SnapshotScannerHDFSAclStorage.hasUserNamespaceHdfsAcl(aclTable, userName, namespace);
  }
  if (tableName != null) {
    isSet = isSet
      || SnapshotScannerHDFSAclStorage.hasUserNamespaceHdfsAcl(aclTable, userName,
        tableName.getNamespaceAsString())
      || SnapshotScannerHDFSAclStorage.hasUserTableHdfsAcl(aclTable, userName, tableName);
  }
  return isSet;
}

/**
 * Returns true only when both the controller and the acl table are initialized; otherwise
 * logs (for the acl-table case) and returns false so the caller skips HDFS acl handling.
 * @param operation description of the operation being attempted, used in the warning log
 */
@InterfaceAudience.Private
boolean checkInitialized(String operation) {
  if (initialized) {
    if (aclTableInitialized) {
      return true;
    } else {
      LOG.warn("Skip set HDFS acls because acl table is not initialized when {}", operation);
    }
  }
  return false;
}

// True when this table permission needs HDFS acl handling: table-scoped (not family/qualifier)
// and the table qualifies per the TableName overload below.
private boolean needHandleTableHdfsAcl(TablePermission tablePermission) throws IOException {
  return needHandleTableHdfsAcl(tablePermission.getTableName(), "")
    && hdfsAclHelper.isNotFamilyOrQualifierPermission(tablePermission);
}

// True when the table needs HDFS acl handling: not a system table, controller initialized,
// and acl-sync-to-HDFS enabled in the table's descriptor (fetched from master services).
private boolean needHandleTableHdfsAcl(TableName tableName, String operation)
  throws IOException {
  return !tableName.isSystemTable() && checkInitialized(operation)
    && hdfsAclHelper.isAclSyncToHdfsEnabled(masterServices.getTableDescriptors().get(tableName));
}

// Same check as above, but for a descriptor already in hand (no master lookup needed).
private boolean needHandleTableHdfsAcl(TableDescriptor tableDescriptor, String operation) {
  TableName tableName = tableDescriptor.getTableName();
  return !tableName.isSystemTable() && checkInitialized(operation)
    && hdfsAclHelper.isAclSyncToHdfsEnabled(tableDescriptor);
}

/**
 * Resolves the user performing the current operation: the RPC caller when present, otherwise
 * the current system user.
 */
private User getActiveUser(ObserverContext<?> ctx) throws IOException {
  // for non-rpc handling, fallback to system user
  Optional<User> optionalUser = ctx.getCaller();
  if (optionalUser.isPresent()) {
    return optionalUser.get();
  }
  return userProvider.getCurrent();
}

/**
 * Remove table user access HDFS acl from namespace directory if the user has no permissions of
 * global, ns of the table or other tables of the ns, eg: Bob has 'ns1:t1' read permission, when
 * delete 'ns1:t1', if Bob has global read permission, '@ns1' read permission or
 * 'ns1:other_tables' read permission, then skip remove Bob access acl in ns1Dirs, otherwise,
 * remove Bob access acl.
 * @param aclTable acl table
 * @param tableName the name of the table
 * @param tablesUsers table users set
 * @return users whose access acl will be removed from the namespace of the table
 * @throws IOException if an error occurred
 */
private Set<String> filterUsersToRemoveNsAccessAcl(Table aclTable, TableName tableName,
  Set<String> tablesUsers) throws IOException {
  Set<String> removeUsers = new HashSet<>();
  byte[] namespace = tableName.getNamespace();
  for (String user : tablesUsers) {
    List<byte[]> userEntries = SnapshotScannerHDFSAclStorage.getUserEntries(aclTable, user);
    boolean remove = true;
    for (byte[] entry : userEntries) {
      // Keep the user's namespace access acl if any surviving entry still covers this
      // namespace: a global entry, the namespace entry itself, or another table in it.
      if (
        PermissionStorage.isGlobalEntry(entry)
          || (PermissionStorage.isNamespaceEntry(entry)
            && Bytes.equals(PermissionStorage.fromNamespaceEntry(entry), namespace))
          || (PermissionStorage.isTableEntry(entry)
            && !Bytes.equals(tableName.getName(), entry)
            && Bytes.equals(TableName.valueOf(entry).getNamespace(), namespace))
      ) {
        remove = false;
        break;
      }
    }
    if (remove) {
      removeUsers.add(user);
    }
  }
  return removeUsers;
}

/**
 * Add a new CF in HBase acl table to record if the HBase read permission is synchronized to
 * related hfiles. The record has two usages: 1. check if we need to remove HDFS acls for a
 * grant without READ permission(eg: grant user table read permission and then grant user table
 * write permission without merging the existing permissions, in this case, need to remove HDFS
 * acls); 2. skip some HDFS acl sync because it may be already set(eg: grant user table read
 * permission and then grant user ns read permission; grant user table read permission and then
 * grant user table write permission with merging the existing permissions).
 */
static final class SnapshotScannerHDFSAclStorage {
  // Column family in the acl table used for the HDFS-acl-sync bookkeeping records.
  static final byte[] HDFS_ACL_FAMILY = Bytes.toBytes("m");
  // The value 'R' has no specific meaning, if cell value is not null, it means that the user HDFS
  // acls is set to hfiles.
  private static final byte[] HDFS_ACL_VALUE = Bytes.toBytes("R");

  // Record that the user's global acl is synced to HDFS (row key: global acl entry).
  static void addUserGlobalHdfsAcl(Table aclTable, String user) throws IOException {
    addUserEntry(aclTable, user, PermissionStorage.ACL_GLOBAL_NAME);
  }

  // Record that the user's namespace acl is synced to HDFS (row key: namespace entry).
  static void addUserNamespaceHdfsAcl(Table aclTable, String user, String namespace)
    throws IOException {
    addUserEntry(aclTable, user, Bytes.toBytes(PermissionStorage.toNamespaceEntry(namespace)));
  }

  // Batch variant: records a table acl sync for every user, sharing one acl-table handle.
  static void addUserTableHdfsAcl(Connection connection, Set<String> users, TableName tableName)
    throws IOException {
    try (Table aclTable = connection.getTable(PermissionStorage.ACL_TABLE_NAME)) {
      for (String user : users) {
        addUserTableHdfsAcl(aclTable, user, tableName);
      }
    }
  }

  // Convenience variant that opens (and closes) the acl table from a connection.
  static void addUserTableHdfsAcl(Connection connection, String user, TableName tableName)
    throws IOException {
    try (Table aclTable = connection.getTable(PermissionStorage.ACL_TABLE_NAME)) {
      addUserTableHdfsAcl(aclTable, user, tableName);
    }
  }

  // Record that the user's table acl is synced to HDFS (row key: table name).
  static void addUserTableHdfsAcl(Table aclTable, String user, TableName tableName)
    throws IOException {
    addUserEntry(aclTable, user, tableName.getName());
  }

  // Shared writer: puts HDFS_ACL_FAMILY:{user} = 'R' on the entry row.
  private static void addUserEntry(Table t, String user, byte[] entry) throws IOException {
    Put p = new Put(entry);
    p.addColumn(HDFS_ACL_FAMILY, Bytes.toBytes(user), HDFS_ACL_VALUE);
    t.put(p);
  }

  // Delete the record of the user's global acl sync.
  static void deleteUserGlobalHdfsAcl(Table aclTable, String user) throws IOException {
    deleteUserEntry(aclTable, user, PermissionStorage.ACL_GLOBAL_NAME);
  }

  // Delete the record of the user's namespace acl sync.
  static void deleteUserNamespaceHdfsAcl(Table aclTable, String user, String namespace)
    throws IOException {
    deleteUserEntry(aclTable, user, Bytes.toBytes(PermissionStorage.toNamespaceEntry(namespace)));
  }

  // Delete the record of the user's table acl sync.
  static void deleteUserTableHdfsAcl(Table aclTable, String user, TableName tableName)
    throws IOException {
    deleteUserEntry(aclTable, user, tableName.getName());
  }

  // Batch variant: deletes the table acl sync record for every user.
  static void deleteUserTableHdfsAcl(Connection connection, Set<String> users,
    TableName tableName) throws IOException {
    try (Table aclTable = connection.getTable(PermissionStorage.ACL_TABLE_NAME)) {
      for (String user : users) {
        deleteUserTableHdfsAcl(aclTable, user, tableName);
      }
    }
  }

  // Shared deleter: removes the single HDFS_ACL_FAMILY:{user} column on the entry row.
  private static void deleteUserEntry(Table aclTable, String user, byte[] entry)
    throws IOException {
    Delete delete = new Delete(entry);
    delete.addColumns(HDFS_ACL_FAMILY, Bytes.toBytes(user));
    aclTable.delete(delete);
  }

  // Delete ALL users' sync records for a namespace (whole HDFS_ACL_FAMILY on its row).
  static void deleteNamespaceHdfsAcl(Connection connection, String namespace)
    throws IOException {
    try (Table aclTable = connection.getTable(PermissionStorage.ACL_TABLE_NAME)) {
      deleteEntry(aclTable, Bytes.toBytes(PermissionStorage.toNamespaceEntry(namespace)));
    }
  }

  // Delete ALL users' sync records for a table.
  static void deleteTableHdfsAcl(Table aclTable, TableName tableName) throws IOException {
    deleteEntry(aclTable, tableName.getName());
  }

  // Shared deleter: removes the whole HDFS_ACL_FAMILY family on the entry row.
  private static void deleteEntry(Table aclTable, byte[] entry) throws IOException {
    Delete delete = new Delete(entry);
    delete.addFamily(HDFS_ACL_FAMILY);
    aclTable.delete(delete);
  }

  // Users with a synced HDFS acl record for the given table.
  static Set<String> getTableUsers(Table aclTable, TableName tableName) throws IOException {
    return getEntryUsers(aclTable, tableName.getName());
  }

  // Users recorded on an entry row: each qualifier in HDFS_ACL_FAMILY is a user name.
  private static Set<String> getEntryUsers(Table aclTable, byte[] entry) throws IOException {
    Set<String> users = new HashSet<>();
    Get get = new Get(entry);
    get.addFamily(HDFS_ACL_FAMILY);
    Result result = aclTable.get(get);
    List<Cell> cells = result.listCells();
    if (cells != null) {
      for (Cell cell : cells) {
        if (cell != null) {
          users.add(Bytes.toString(CellUtil.cloneQualifier(cell)));
        }
      }
    }
    return users;
  }

  /**
   * Returns the namespaces and tables for which the user has a synced HDFS acl record,
   * split by entry type (global entries are ignored here).
   */
  static Pair<Set<String>, Set<TableName>> getUserNamespaceAndTable(Table aclTable,
    String userName) throws IOException {
    Set<String> namespaces = new HashSet<>();
    Set<TableName> tables = new HashSet<>();
    List<byte[]> userEntries = getUserEntries(aclTable, userName);
    for (byte[] entry : userEntries) {
      if (PermissionStorage.isNamespaceEntry(entry)) {
        namespaces.add(Bytes.toString(PermissionStorage.fromNamespaceEntry(entry)));
      } else if (PermissionStorage.isTableEntry(entry)) {
        tables.add(TableName.valueOf(entry));
      }
    }
    return new Pair<>(namespaces, tables);
  }

  // All entry rows (global/namespace/table) on which the user has a sync record.
  // NOTE(review): full-table scan filtered to one column; presumably the acl table stays
  // small enough for this to be cheap — confirm against usage.
  static List<byte[]> getUserEntries(Table aclTable, String userName) throws IOException {
    Scan scan = new Scan();
    scan.addColumn(HDFS_ACL_FAMILY, Bytes.toBytes(userName));
    ResultScanner scanner = aclTable.getScanner(scan);
    List<byte[]> entry = new ArrayList<>();
    for (Result result : scanner) {
      if (result != null && result.getRow() != null) {
        entry.add(result.getRow());
      }
    }
    return entry;
  }

  // True if the user's global acl is recorded as synced to HDFS.
  static boolean hasUserGlobalHdfsAcl(Table aclTable, String user) throws IOException {
    return hasUserEntry(aclTable, user, PermissionStorage.ACL_GLOBAL_NAME);
  }

  // True if the user's namespace acl is recorded as synced to HDFS.
  static boolean hasUserNamespaceHdfsAcl(Table aclTable, String user, String namespace)
    throws IOException {
    return hasUserEntry(aclTable, user,
      Bytes.toBytes(PermissionStorage.toNamespaceEntry(namespace)));
  }

  // True if the user's table acl is recorded as synced to HDFS.
  static boolean hasUserTableHdfsAcl(Table aclTable, String user, TableName tableName)
    throws IOException {
    return hasUserEntry(aclTable, user, tableName.getName());
  }

  // Existence check on the single HDFS_ACL_FAMILY:{user} column of the entry row.
  private static boolean hasUserEntry(Table aclTable, String userName, byte[] entry)
    throws IOException {
    Get get = new Get(entry);
    get.addColumn(HDFS_ACL_FAMILY, Bytes.toBytes(userName));
    return aclTable.exists(get);
  }
}
}
google/j2objc
36,082
translator/src/main/java/com/google/devtools/j2objc/util/ElementUtil.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.devtools.j2objc.util; import com.google.common.collect.Iterables; import com.google.common.collect.Lists; import com.google.common.collect.Sets; import com.google.devtools.j2objc.Options; import com.google.devtools.j2objc.ast.QualifiedName; import com.google.devtools.j2objc.ast.SimpleName; import com.google.devtools.j2objc.types.GeneratedElement; import com.google.devtools.j2objc.types.GeneratedExecutableElement; import com.google.devtools.j2objc.types.GeneratedTypeElement; import com.google.devtools.j2objc.types.GeneratedVariableElement; import com.google.devtools.j2objc.types.LambdaTypeElement; import com.google.j2objc.annotations.Property; import com.google.j2objc.annotations.RetainedWith; import com.sun.tools.javac.code.Attribute; import com.sun.tools.javac.code.Flags; import com.sun.tools.javac.code.Symbol; import com.sun.tools.javac.code.Symbol.ClassSymbol; import com.sun.tools.javac.code.Symbol.VarSymbol; import com.sun.tools.javac.code.SymbolMetadata; import java.lang.reflect.Method; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashMap; import java.util.HashSet; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import java.util.regex.Pattern; import javax.annotation.ParametersAreNonnullByDefault; import javax.lang.model.AnnotatedConstruct; import 
javax.lang.model.element.AnnotationMirror; import javax.lang.model.element.AnnotationValue; import javax.lang.model.element.Element; import javax.lang.model.element.ElementKind; import javax.lang.model.element.ExecutableElement; import javax.lang.model.element.Modifier; import javax.lang.model.element.Name; import javax.lang.model.element.NestingKind; import javax.lang.model.element.PackageElement; import javax.lang.model.element.TypeElement; import javax.lang.model.element.VariableElement; import javax.lang.model.type.DeclaredType; import javax.lang.model.type.TypeKind; import javax.lang.model.type.TypeMirror; import javax.lang.model.type.TypeVariable; import javax.lang.model.util.Elements; import javax.tools.JavaFileObject; import org.jspecify.annotations.NullMarked; import org.jspecify.annotations.Nullable; /** * Utility methods for working with elements. */ public final class ElementUtil { // Flags defined in JVM spec, table 4.1. These constants are also defined in // java.lang.reflect.Modifier, but aren't public. public static final int ACC_BRIDGE = 0x40; public static final int ACC_VARARGS = 0x80; public static final int ACC_SYNTHETIC = 0x1000; public static final int ACC_ANNOTATION = 0x2000; public static final int ACC_ENUM = 0x4000; // Not defined in JVM spec, but used by reflection support. public static final int ACC_ANONYMOUS = 0x8000; // Class files can only use the lower 16 bits. 
public static final int ACC_FLAG_MASK = 0xFFFF; private static final Set<Modifier> VISIBILITY_MODIFIERS = EnumSet.of( Modifier.PUBLIC, Modifier.PROTECTED, Modifier.PRIVATE); private static final String LAZY_INIT = "com.google.errorprone.annotations.concurrent.LazyInit"; private static final Pattern NULLABLE_PATTERN = Pattern.compile("Nullable.*|CheckForNull|ParametricNullness"); private static final Pattern NONNULL_PATTERN = Pattern.compile("No[nt][Nn]ull.*"); private final Elements javacElements; private final Map<Element, TypeMirror> elementTypeMap = new HashMap<>(); private final TypeElement javaObject; public ElementUtil(Elements javacElements) { this.javacElements = javacElements; javaObject = javacElements.getTypeElement("java.lang.Object"); } public static String getName(Element element) { // Always return qualified package names. Name name = element.getKind() == ElementKind.PACKAGE ? ((PackageElement) element).getQualifiedName() : element.getSimpleName(); return name.toString(); } public static String getQualifiedName(TypeElement element) { return element.getQualifiedName().toString(); } public static boolean isNamed(Element element, String name) { return element.getSimpleName().contentEquals(name); } public static boolean isStatic(Element element) { return hasModifier(element, Modifier.STATIC); } public static boolean isDefault(Element element) { // Indirectly check whether Modifier.DEFAULT exists, since it was // added in Java 8. 
try { Modifier m = Modifier.valueOf("DEFAULT"); return hasModifier(element, m); } catch (IllegalArgumentException e) { return false; } } public static boolean isFinal(Element element) { return hasModifier(element, Modifier.FINAL); } public static boolean isPublic(Element element) { return hasModifier(element, Modifier.PUBLIC); } public static boolean isPrivate(Element element) { return hasModifier(element, Modifier.PRIVATE); } public static boolean isVolatile(VariableElement element) { return hasModifier(element, Modifier.VOLATILE) // Upgrade reference type fields marked with error prone's LazyInit because this indicates // an intentional racy init. || (!element.asType().getKind().isPrimitive() && hasQualifiedNamedAnnotation(element, LAZY_INIT)); } public static boolean isTopLevel(TypeElement type) { return type.getNestingKind() == NestingKind.TOP_LEVEL; } public static boolean isAnonymous(TypeElement type) { return type.getNestingKind() == NestingKind.ANONYMOUS; } public static boolean isLocal(TypeElement type) { NestingKind nestingKind = type.getNestingKind(); return nestingKind == NestingKind.ANONYMOUS || nestingKind == NestingKind.LOCAL; } public static boolean isLambda(TypeElement type) { return type instanceof LambdaTypeElement; } public static boolean isInterface(Element type) { return type.getKind() == ElementKind.INTERFACE; } public static boolean isAnnotationType(Element type) { return type.getKind() == ElementKind.ANNOTATION_TYPE; } public static boolean isEnum(Element e) { return e.getKind() == ElementKind.ENUM; } public static boolean isEnumConstant(Element e) { return e.getKind() == ElementKind.ENUM_CONSTANT; } public static boolean isPackage(Element e) { return e.getKind() == ElementKind.PACKAGE; } public static boolean isRecord(Element e) { // Check it as a string so translator doesn't have to run with a Java 17 minimum. 
return e.getKind().name().equals("RECORD"); } public static boolean isTypeElement(Element e) { ElementKind kind = e.getKind(); return kind.isClass() || kind.isInterface(); } public static boolean isExecutableElement(Element e) { ElementKind kind = e.getKind(); return kind == ElementKind.CONSTRUCTOR || kind == ElementKind.METHOD; } public static boolean isTypeParameterElement(Element e) { return e.getKind() == ElementKind.TYPE_PARAMETER; } public static boolean isAnnotationMember(ExecutableElement e) { return isAnnotationType(getDeclaringClass(e)); } //TODO(malvania): For elements inside static blocks, this method returns a "TypeElement" of a // static block, which does not work with getBinaryName(TypeElement) (one proven example) public static TypeElement getDeclaringClass(Element element) { do { element = element.getEnclosingElement(); } while (element != null && !isTypeElement(element)); return (TypeElement) element; } public static @Nullable TypeElement getSuperclass(TypeElement element) { TypeMirror superClass = element.getSuperclass(); return superClass != null ? TypeUtil.asTypeElement(element.getSuperclass()) : null; } public static List<TypeElement> getInterfaces(TypeElement element) { return Lists.newArrayList(Iterables.transform( element.getInterfaces(), i -> TypeUtil.asTypeElement(i))); } public static boolean isPrimitiveConstant(VariableElement element) { return isFinal(element) && element.asType().getKind().isPrimitive() && element.getConstantValue() != null // Exclude local variables declared final. 
&& element.getKind().isField(); } public static boolean isConstant(VariableElement element) { Object constantValue = element.getConstantValue(); return constantValue != null && (element.asType().getKind().isPrimitive() || (constantValue instanceof String && UnicodeUtils.hasValidCppCharacters((String) constantValue))); } public static boolean isStringConstant(VariableElement element) { Object constantValue = element.getConstantValue(); return constantValue != null && constantValue instanceof String && UnicodeUtils.hasValidCppCharacters((String) constantValue); } /** * Returns whether this variable will be declared in global scope in ObjC. */ public static boolean isGlobalVar(VariableElement element) { return isStatic(element) || isPrimitiveConstant(element); } /** * Returns whether this variable will be an ObjC instance variable. */ public static boolean isInstanceVar(VariableElement element) { return element.getKind() == ElementKind.FIELD && !isGlobalVar(element); } public static boolean isNonnull(VariableElement element) { return element instanceof GeneratedVariableElement && ((GeneratedVariableElement) element).isNonnull(); } public static String getTypeQualifiers(VariableElement element) { return element instanceof GeneratedVariableElement ? 
((GeneratedVariableElement) element).getTypeQualifiers() : null; } public static boolean isAbstract(Element element) { return hasModifier(element, Modifier.ABSTRACT); } public static boolean isNative(Element element) { return hasModifier(element, Modifier.NATIVE); } public static boolean isSynchronized(Element element) { return hasModifier(element, Modifier.SYNCHRONIZED); } public static boolean isSynthetic(int modifiers) { return (modifiers & ACC_SYNTHETIC) != 0; } public static boolean isSynthetic(Element e) { if (e instanceof GeneratedElement) { return ((GeneratedElement) e).isSynthetic(); } if (e instanceof Symbol) { return (((Symbol) e).flags() & Flags.SYNTHETIC) > 0; } return false; } public static String getHeader(TypeElement e) { return e instanceof GeneratedTypeElement ? ((GeneratedTypeElement) e).getHeader() : null; } public static @Nullable String getForwardDeclaration(TypeElement e) { return e instanceof GeneratedTypeElement ? ((GeneratedTypeElement) e).getForwardDeclaration() : null; } public static boolean isIosType(TypeElement e) { return e instanceof GeneratedTypeElement && ((GeneratedTypeElement) e).isIosType(); } public static String getSelector(ExecutableElement e) { if (e instanceof GeneratedExecutableElement) { return ((GeneratedExecutableElement) e).getSelector(); } return null; } public static boolean isPackageInfo(TypeElement type) { return type.getSimpleName().toString().equals(NameTable.PACKAGE_INFO_CLASS_NAME); } /** * Tests if this type element is private to its source file. A public type declared * within a private type is considered private. 
*/ public static boolean isPrivateInnerType(TypeElement type) { switch (type.getNestingKind()) { case ANONYMOUS: case LOCAL: return true; case MEMBER: return isPrivate(type) || isPrivateInnerType((TypeElement) type.getEnclosingElement()); case TOP_LEVEL: return isPrivate(type); } throw new AssertionError("Unknown NestingKind"); } /** * Determines if a type element can access fields and methods from an outer class. */ public static boolean hasOuterContext(TypeElement type) { switch (type.getNestingKind()) { case ANONYMOUS: case LOCAL: return !isStatic(type.getEnclosingElement()); case MEMBER: return !isStatic(type); case TOP_LEVEL: return false; } throw new AssertionError("Unknown NestingKind"); } private static boolean hasModifier(Element element, Modifier modifier) { return element.getModifiers().contains(modifier); } public static boolean isVariable(Element element) { ElementKind kind = element.getKind(); return kind == ElementKind.FIELD || kind == ElementKind.LOCAL_VARIABLE || kind == ElementKind.PARAMETER || kind == ElementKind.EXCEPTION_PARAMETER || kind == ElementKind.RESOURCE_VARIABLE || kind == ElementKind.ENUM_CONSTANT; } public static boolean isField(Element element) { return element.getKind() == ElementKind.FIELD; } public static boolean isParameter(Element element) { return element.getKind() == ElementKind.PARAMETER; } public static boolean isLocalVariable(Element element) { return element.getKind() == ElementKind.LOCAL_VARIABLE; } public static boolean isMethod(Element element) { return element.getKind() == ElementKind.METHOD; } public static boolean isConstructor(Element element) { return element.getKind() == ElementKind.CONSTRUCTOR; } public static boolean isInstanceMethod(Element element) { return isMethod(element) && !isStatic(element); } public static boolean isWeakReference(VariableElement var) { return hasNamedAnnotation(var, "Weak") || hasWeakPropertyAttribute(var) || (var instanceof GeneratedVariableElement && ((GeneratedVariableElement) 
var).isWeak()); } public static boolean isUnretainedReference(VariableElement var) { return isWeakReference(var); } public boolean isWeakOuterType(TypeElement type) { if (type instanceof LambdaTypeElement) { return ((LambdaTypeElement) type).isWeakOuter(); } else if (isAnonymous(type)) { // TODO(kstanger): remove this block when javac conversion is complete. // For anonymous classes we must check for a TYPE_USE annotation on the supertype used in the // declaration. For example: // Runnable r = new @WeakOuter Runnable() { ... }; TypeMirror superclass = type.getSuperclass(); if (superclass != null && hasNamedAnnotation(superclass, "WeakOuter")) { return true; } for (TypeMirror intrface : type.getInterfaces()) { if (hasNamedAnnotation(intrface, "WeakOuter")) { return true; } } if (elementTypeMap.containsKey(type)) { return hasNamedAnnotation(elementTypeMap.get(type), "WeakOuter"); } return hasNamedAnnotation(type.asType(), "WeakOuter"); } else { return hasNamedAnnotation(type, "WeakOuter"); } } private static boolean hasWeakPropertyAttribute(VariableElement var) { AnnotationMirror annotation = getAnnotation(var, Property.class); return annotation != null && parsePropertyAttribute(annotation).contains("weak"); } /** * Returns the attributes of a Property annotation. */ public static Set<String> parsePropertyAttribute(AnnotationMirror annotation) { assert getName(annotation.getAnnotationType().asElement()).equals("Property"); String attributesStr = (String) getAnnotationValue(annotation, "value"); Set<String> attributes = new HashSet<>(); if (attributesStr != null) { attributes.addAll(Arrays.asList(attributesStr.split(",\\s*"))); attributes.remove(""); // Clear any empty strings. } return attributes; } public static boolean isRetainedWithField(VariableElement varElement) { return hasAnnotation(varElement, RetainedWith.class); } public static <T extends Element> Iterable<T> filterEnclosedElements( Element elem, Class<T> resultClass, ElementKind... 
kinds) { List<ElementKind> kindsList = Arrays.asList(kinds); return Iterables.transform(Iterables.filter( elem.getEnclosedElements(), e -> kindsList.contains(e.getKind())), resultClass::cast); } public static Iterable<ExecutableElement> getMethods(TypeElement e) { return filterEnclosedElements(e, ExecutableElement.class, ElementKind.METHOD); } public static Iterable<ExecutableElement> getConstructors(TypeElement e) { return filterEnclosedElements(e, ExecutableElement.class, ElementKind.CONSTRUCTOR); } public static List<ExecutableElement> getExecutables(TypeElement e) { return Lists.newArrayList(filterEnclosedElements( e, ExecutableElement.class, ElementKind.CONSTRUCTOR, ElementKind.METHOD)); } public static List<VariableElement> getDeclaredFields(Element e) { return Lists.newArrayList(filterEnclosedElements(e, VariableElement.class, ElementKind.FIELD)); } public static Iterable<TypeElement> getDeclaredTypes(TypeElement e) { return filterEnclosedElements( e, TypeElement.class, ElementKind.ANNOTATION_TYPE, ElementKind.ENUM, ElementKind.CLASS, ElementKind.INTERFACE); } private static boolean paramsMatch(ExecutableElement method, String[] paramTypes) { List<? extends VariableElement> params = method.getParameters(); int size = params.size(); if (size != paramTypes.length) { return false; } for (int i = 0; i < size; i++) { if (!TypeUtil.getQualifiedName(params.get(i).asType()).equals(paramTypes[i])) { return false; } } return true; } public static ExecutableElement findMethod(TypeElement type, String name, String... paramTypes) { return Iterables.getFirst(Iterables.filter( filterEnclosedElements(type, ExecutableElement.class, ElementKind.METHOD), method -> getName(method).equals(name) && paramsMatch(method, paramTypes)), null); } /** Locate method which matches either Java or Objective C getter name patterns. 
*/
public static ExecutableElement findGetterMethod(
    String propertyName, TypeMirror propertyType, TypeElement declaringClass, boolean isStatic) {
  // Try Objective-C getter naming convention (accessor named exactly like the property).
  ExecutableElement getter = ElementUtil.findMethod(declaringClass, propertyName);
  if (getter == null) {
    // Try Java getter naming conventions ("isFoo" for booleans, "getFoo" otherwise).
    String prefix = TypeUtil.isBoolean(propertyType) ? "is" : "get";
    getter = ElementUtil.findMethod(declaringClass, prefix + NameTable.capitalize(propertyName));
  }
  // Only return the match if its static-ness agrees with what the caller asked for.
  return getter != null && isStatic == isStatic(getter) ? getter : null;
}

/** Locate method which matches the Java/Objective C setter name pattern. */
public static ExecutableElement findSetterMethod(
    String propertyName, TypeMirror type, TypeElement declaringClass, boolean isStatic) {
  ExecutableElement setter = ElementUtil.findMethod(
      declaringClass, "set" + NameTable.capitalize(propertyName), TypeUtil.getQualifiedName(type));
  return setter != null && isStatic == isStatic(setter) ? setter : null;
}

/**
 * Returns the first constructor of {@code type} whose parameter types match
 * {@code paramTypes}, or null if there is none.
 */
public static ExecutableElement findConstructor(TypeElement type, String... paramTypes) {
  return Iterables.getFirst(Iterables.filter(
      getConstructors(type), method -> paramsMatch(method, paramTypes)), null);
}

/** Returns the field of {@code type} with the given simple name, or null. */
public static VariableElement findField(TypeElement type, String name) {
  return Iterables.getFirst(Iterables.filter(
      filterEnclosedElements(type, VariableElement.class, ElementKind.FIELD),
      field -> getName(field).equals(name)), null);
}

/** Lazily maps each element to its declared type mirror. */
public static Iterable<TypeMirror> asTypes(Iterable<? extends Element> elements) {
  return Iterables.transform(elements, elem -> elem.asType());
}

/** Delegates to javax.lang.model's override check. */
public boolean overrides(
    ExecutableElement overrider, ExecutableElement overridden, TypeElement type) {
  return javacElements.overrides(overrider, overridden, type);
}

/** Walks up the enclosing-element chain until the containing package is reached. */
public static PackageElement getPackage(Element e) {
  while (e != null && !isPackage(e)) {
    e = e.getEnclosingElement();
  }
  return (PackageElement) e;
}

/**
 * Builds an AST name for a package: a SimpleName for a root package,
 * otherwise a QualifiedName chaining up through the parent packages.
 */
public com.google.devtools.j2objc.ast.Name getPackageName(PackageElement element) {
  PackageElement parent = getParentPackage(element);
  if (parent == null) {
    return new SimpleName(element);
  }
  return new QualifiedName(element, element.asType(), getPackageName(parent));
}

/**
 * Returns the enclosing package element (e.g. "a.b" for "a.b.c"), or null for
 * the unnamed or a top-level package.
 */
public PackageElement getParentPackage(PackageElement element) {
  String name = element.getQualifiedName().toString();
  if (name.isEmpty() || !name.contains(".")) {
    return null;
  }
  name = name.substring(0, name.lastIndexOf('.'));
  // Try the Java 9+ API where the module needs to be specified to find the package.
  // Reflection is used so this code still compiles and runs on Java 8.
  try {
    Method getModuleOf = Elements.class.getMethod("getModuleOf", Element.class);
    Object module = getModuleOf.invoke(javacElements, element);
    Method getPackageElement = Elements.class
        .getMethod("getPackageElement", getModuleOf.getReturnType(), CharSequence.class);
    return (PackageElement) getPackageElement.invoke(javacElements, module, name);
  } catch (ReflectiveOperationException e) {
    // Default behavior: Java 8.
    return javacElements.getPackageElement(name);
  }
}

/**
 * Returns the JVM binary name for a type (inner classes separated by '$').
 * GeneratedTypeElement instances are synthesized by j2objc and have no javac
 * symbol, so their binary name is reconstructed from the declaring class.
 */
public String getBinaryName(TypeElement e) {
  if (e instanceof GeneratedTypeElement) {
    TypeElement declaringClass = getDeclaringClass(e);
    if (declaringClass != null) {
      return getBinaryName(declaringClass) + '$' + getName(e);
    } else {
      return getQualifiedName(e);
    }
  }
  return javacElements.getBinaryName(e).toString();
}

/** Convenience overload: starts the original-method search at the declaring class. */
public ExecutableElement getOriginalMethod(ExecutableElement method) {
  TypeElement declaringClass = getDeclaringClass(method);
  return getOriginalMethod(method, declaringClass, declaringClass);
}

/**
 * Finds the original method element to use for generating a selector. The method returned is the
 * first method found in the hierarchy while traversing in order of declared inheritance that
 * doesn't override a method from a supertype. (ie. it is the first leaf node found in the tree of
 * overriding methods)
 */
public ExecutableElement getOriginalMethod(
    ExecutableElement topMethod, TypeElement declaringClass, TypeElement currentType) {
  if (currentType == null) {
    return null;
  }
  // Interfaces are treated as extending java.lang.Object so the search terminates.
  TypeElement superclass = currentType.getKind().isInterface()
      ? javaObject : getSuperclass(currentType);
  ExecutableElement original = getOriginalMethod(topMethod, declaringClass, superclass);
  if (original != null) {
    return original;
  }
  for (TypeMirror supertype : currentType.getInterfaces()) {
    original = getOriginalMethod(topMethod, declaringClass, TypeUtil.asTypeElement(supertype));
    if (original != null) {
      return original;
    }
  }
  if (declaringClass == currentType) {
    return topMethod;
  }
  for (ExecutableElement candidate : getMethods(currentType)) {
    if (isInstanceMethod(candidate) && overrides(topMethod, candidate, declaringClass)) {
      return candidate;
    }
  }
  return null;
}

/**
 * Returns the annotation's explicitly-specified values, augmented with the
 * declared defaults for any members that were left unspecified.
 */
Map<? extends ExecutableElement, ? extends AnnotationValue> getElementValuesWithDefaults(
    AnnotationMirror annotation) {
  DeclaredType type = annotation.getAnnotationType();
  Map<ExecutableElement, AnnotationValue> map = new LinkedHashMap<>(
      annotation.getElementValues());
  for (ExecutableElement method : getMethods((TypeElement) type.asElement())) {
    AnnotationValue defaultValue = method.getDefaultValue();
    if (defaultValue != null && !map.containsKey(method)) {
      map.put(method, defaultValue);
    }
  }
  return map;
}

/** Returns only the visibility modifiers (public/protected/private) of an element. */
public static Set<Modifier> getVisibilityModifiers(Element e) {
  return Sets.intersection(e.getModifiers(), VISIBILITY_MODIFIERS);
}

// This conversion is lossy because there is no bit for "default" in the JVM spec.
public static int fromModifierSet(Set<Modifier> set) {
  int modifiers = 0;
  if (set.contains(Modifier.PUBLIC)) {
    modifiers |= java.lang.reflect.Modifier.PUBLIC;
  }
  if (set.contains(Modifier.PRIVATE)) {
    modifiers |= java.lang.reflect.Modifier.PRIVATE;
  }
  if (set.contains(Modifier.PROTECTED)) {
    modifiers |= java.lang.reflect.Modifier.PROTECTED;
  }
  if (set.contains(Modifier.STATIC)) {
    modifiers |= java.lang.reflect.Modifier.STATIC;
  }
  if (set.contains(Modifier.FINAL)) {
    modifiers |= java.lang.reflect.Modifier.FINAL;
  }
  if (set.contains(Modifier.SYNCHRONIZED)) {
    modifiers |= java.lang.reflect.Modifier.SYNCHRONIZED;
  }
  if (set.contains(Modifier.VOLATILE)) {
    modifiers |= java.lang.reflect.Modifier.VOLATILE;
  }
  if (set.contains(Modifier.TRANSIENT)) {
    modifiers |= java.lang.reflect.Modifier.TRANSIENT;
  }
  if (set.contains(Modifier.NATIVE)) {
    modifiers |= java.lang.reflect.Modifier.NATIVE;
  }
  if (set.contains(Modifier.ABSTRACT)) {
    modifiers |= java.lang.reflect.Modifier.ABSTRACT;
  }
  if (set.contains(Modifier.STRICTFP)) {
    // Note: the reflect constant is named STRICT, not STRICTFP.
    modifiers |= java.lang.reflect.Modifier.STRICT;
  }
  return modifiers;
}

/** True if the mirror's annotation type has RUNTIME retention. */
public static boolean isRuntimeAnnotation(AnnotationMirror mirror) {
  return isRuntimeAnnotation(mirror.getAnnotationType().asElement());
}

/** True if {@code e} is an annotation type declared with RUNTIME retention. */
public static boolean isRuntimeAnnotation(Element e) {
  return isAnnotationType(e) && hasRetentionPolicy(e, "RUNTIME");
}

public static boolean isGeneratedAnnotation(AnnotationMirror mirror) {
  return isGeneratedAnnotation(mirror.getAnnotationType().asElement());
}

/** Decides whether an annotation type should be stripped from generated output. */
public static boolean isToBeRemovedAnnotations(Element e, Options options) {
  if (isAnnotationType(e)) {
    if (options.stripReflection()) {
      // If --strip-reflection flag is on, CLASS, SOURCE or RUNTIME annotations will be removed.
      return true;
    } else {
      // If --strip-reflection flag is off, CLASS or SOURCE annotations will be removed.
      return !isRuntimeAnnotation(e);
    }
  }
  return false;
}

public static boolean isGeneratedAnnotation(Element e) {
  // Use a negative check, since CLASS retention is the default.
  return isAnnotationType(e) && !hasRetentionPolicy(e, "SOURCE");
}

/**
 * True if the element carries an @Retention annotation whose value matches
 * {@code policy}. Only the first value of the first Retention found is checked.
 */
private static boolean hasRetentionPolicy(Element e, String policy) {
  for (AnnotationMirror ann : getAllAnnotations(e)) {
    String annotationName = ann.getAnnotationType().asElement().getSimpleName().toString();
    if (annotationName.equals("Retention")) {
      for (AnnotationValue value : ann.getElementValues().values()) {
        // Retention's value is a RetentionPolicy enum constant.
        VariableElement v = (VariableElement) value.getValue();
        return v.getSimpleName().contentEquals(policy);
      }
    }
  }
  return false;
}

/** Finds an annotation mirror by the annotation class's canonical name. */
public static AnnotationMirror getAnnotation(Element element, Class<?> annotationClass) {
  return getQualifiedNamedAnnotation(element, annotationClass.getCanonicalName());
}

public static boolean hasAnnotation(Element element, Class<?> annotationClass) {
  return getAnnotation(element, annotationClass) != null;
}

/**
 * Less strict version of the above where we don't care about the annotation's package.
 */
public static boolean hasNamedAnnotation(AnnotatedConstruct ac, String name) {
  for (AnnotationMirror annotation : getAllAnnotations(ac)) {
    if (getName(annotation.getAnnotationType().asElement()).equals(name)) {
      return true;
    }
  }
  return false;
}

/** Similar to the above but matches against a pattern.
*/
public static boolean hasNamedAnnotation(AnnotatedConstruct ac, Pattern pattern) {
  for (AnnotationMirror annotation : getAllAnnotations(ac)) {
    if (pattern.matcher(getName(annotation.getAnnotationType().asElement())).matches()) {
      return true;
    }
  }
  return false;
}

public static boolean hasQualifiedNamedAnnotation(Element element, String name) {
  return getQualifiedNamedAnnotation(element, name) != null;
}

/** Finds an annotation mirror by its fully-qualified type name, or null. */
public static AnnotationMirror getQualifiedNamedAnnotation(Element element, String name) {
  for (AnnotationMirror annotation : getAllAnnotations(element)) {
    if (getQualifiedName((TypeElement) annotation.getAnnotationType().asElement()).equals(name)) {
      return annotation;
    }
  }
  return null;
}

// Combines annotations present in source with externally-supplied ones
// (e.g. from an external annotations file).
private static Iterable<? extends AnnotationMirror> getAllAnnotations(AnnotatedConstruct ac) {
  return Iterables.concat(ac.getAnnotationMirrors(), ExternalAnnotations.get(ac));
}

/**
 * Return true if a binding has a named "Nullable" annotation. Package names aren't
 * checked because different nullable annotations are defined by several different
 * Java frameworks.
 */
public static boolean hasNullableAnnotation(Element element) {
  return hasNullabilityAnnotation(element, NULLABLE_PATTERN);
}

/**
 * Return true if a binding has a named "Nonnull" annotation. Package names aren't
 * checked because different nonnull annotations are defined in several Java
 * frameworks, with varying but similar names.
 */
public static boolean hasNonnullAnnotation(Element element) {
  return hasNullabilityAnnotation(element, NONNULL_PATTERN);
}

/**
 * Checks all the places a nullability annotation can hide: the method return
 * type, a variable's type, javac's internal type attributes, a type variable
 * and its bound, and finally the element's declaration annotations.
 */
private static boolean hasNullabilityAnnotation(Element element, Pattern pattern) {
  // Ignore nullability annotation on primitive or void return types.
  if (isMethod(element)) {
    TypeKind kind = ((ExecutableElement) element).getReturnType().getKind();
    if (kind.isPrimitive() || kind == TypeKind.VOID) {
      return false;
    }
  }
  TypeMirror elementType = element.asType();
  if (isVariable(element) && elementType.getKind().isPrimitive()) {
    return false;
  }
  // The if statements cover type annotations.
  if (isMethod(element)
      && hasNamedAnnotation(((ExecutableElement) element).getReturnType(), pattern)) {
    return true;
  }
  if (isVariable(element)) {
    if (hasNamedAnnotation(elementType, pattern)) {
      return true;
    }
    // Annotation may be saved as a type attribute in the javac symbol.
    // NOTE(review): this relies on com.sun.tools.javac internals (VarSymbol,
    // SymbolMetadata); behavior depends on the compiler implementation in use.
    if (element instanceof VarSymbol) {
      SymbolMetadata metadata = ((VarSymbol) element).getMetadata();
      if (metadata != null) {
        List<Attribute.TypeCompound> attrs = metadata.getTypeAttributes();
        for (Attribute.TypeCompound attr : attrs) {
          if (pattern.matcher(getName(attr.type.asElement())).matches()) {
            return true;
          }
        }
      }
    }
  }
  if (TypeUtil.isTypeVariable(elementType)) {
    // Generics may be annotated as nullable and both the type variable and the bound
    // may be annotated.
    if (hasNamedAnnotation(((TypeVariable) elementType).asElement(), pattern)) {
      return true;
    }
    if (hasNamedAnnotation(((TypeVariable) elementType).getUpperBound(), pattern)) {
      return true;
    }
  }
  // This covers declaration annotations.
  return hasNamedAnnotation(element, pattern);
}

/**
 * Returns the value of the named annotation member, unwrapped from its
 * AnnotationValue, or null if the member was not explicitly specified.
 */
public static Object getAnnotationValue(AnnotationMirror annotation, String name) {
  for (Entry<? extends ExecutableElement, ? extends AnnotationValue> entry
      : annotation.getElementValues().entrySet()) {
    if (entry.getKey().getSimpleName().toString().equals(name)) {
      return entry.getValue().getValue();
    }
  }
  return null;
}

/**
 * Returns an alphabetically sorted list of an annotation type's members.
 * This is necessary since an annotation's values can be specified in any
 * order, but the annotation's constructor needs to be invoked using its
 * declaration order.
*/
public static List<ExecutableElement> getSortedAnnotationMembers(TypeElement annotation) {
  List<ExecutableElement> members = Lists.newArrayList(getMethods(annotation));
  Collections.sort(members, (m1, m2) -> getName(m1).compareTo(getName(m2)));
  return members;
}

/**
 * True when parameters of {@code element} default to non-null: checks the element
 * itself, then its package, then the package-info lookup for the package name.
 */
public boolean areParametersNonnullByDefault(Element element, Options options) {
  if (ElementUtil.hasAnnotation(element, ParametersAreNonnullByDefault.class)) {
    return true;
  }
  PackageElement pkg = getPackage(element);
  if (ElementUtil.hasAnnotation(pkg, ParametersAreNonnullByDefault.class)) {
    return true;
  }
  String pkgName = pkg.getQualifiedName().toString();
  return options.getPackageInfoLookup().hasParametersAreNonnullByDefault(pkgName);
}

/**
 * True when JSpecify @NullMarked applies to {@code element}: requires the
 * --nullmarked option, then checks the element, its package, and the
 * package-info lookup, in that order.
 */
public boolean isNullMarked(Element element, Options options) {
  if (!options.nullMarked()) {
    return false;
  }
  if (ElementUtil.hasAnnotation(element, NullMarked.class)) {
    return true;
  }
  PackageElement pkg = getPackage(element);
  if (ElementUtil.hasAnnotation(pkg, NullMarked.class)) {
    return true;
  }
  String pkgName = pkg.getQualifiedName().toString();
  return options.getPackageInfoLookup().isNullMarked(pkgName);
}

/**
 * Returns true if there's a SuppressedWarning annotation with the specified warning. The
 * SuppressWarnings annotation can be inherited from the owning method or class, but does not have
 * package scope.
 */
@SuppressWarnings("unchecked")
public static boolean suppressesWarning(String warning, Element element) {
  if (element == null || isPackage(element)) {
    // Recursion terminates at the package level: no package-scope suppression.
    return false;
  }
  AnnotationMirror annotation = getAnnotation(element, SuppressWarnings.class);
  if (annotation != null) {
    for (AnnotationValue elem
        : (List<? extends AnnotationValue>) getAnnotationValue(annotation, "value")) {
      if (warning.equals(elem.getValue())) {
        return true;
      }
    }
  }
  // Walk outward to the enclosing method/class.
  return suppressesWarning(warning, element.getEnclosingElement());
}

/**
 * Maps an Element to a TypeMirror. element.asType() is the preferred mapping,
 * but sometimes type information is lost.
For example, an anonymous class with a type use annotation carries the annotation
on the node's type, but not on the node's element.asType().
*/
public void mapElementType(Element element, TypeMirror type) {
  elementTypeMap.put(element, type);
}

/**
 * Returns the associated type mirror for an element. A type registered via
 * {@link #mapElementType} takes precedence over {@code element.asType()}.
 */
public TypeMirror getType(Element element) {
  if (elementTypeMap.containsKey(element)) {
    return elementTypeMap.get(element);
  }
  return element.asType();
}

/**
 * Returns whether an element is marked as always being non-null. Field, method,
 * and parameter elements can be defined as non-null with a Nonnull annotation.
 * Method parameters can also be defined as non-null by annotating the owning
 * package or type element with the ParametersNonnullByDefault annotation.
 */
public static boolean isNonnull(Element element, boolean parametersNonnullByDefault) {
  if (hasNonnullAnnotation(element)) {
    return true;
  }
  if (isConstructor(element)) {
    // Java constructors are always non-null.
    return true;
  }
  if (!isParameter(element) || !parametersNonnullByDefault) {
    return false;
  }
  // Primitive parameters cannot be null, so the default doesn't apply to them.
  return !((VariableElement) element).asType().getKind().isPrimitive();
}

/**
 * Returns the source file name for a type element. Returns null if the element
 * isn't a javac ClassSymbol, or if it is defined by a classfile which was compiled
 * without a source attribute.
 */
public static String getSourceFile(TypeElement type) {
  if (!(type instanceof ClassSymbol)) {
    return null;
  }
  JavaFileObject src = ((ClassSymbol) type).sourcefile;
  return src != null ? src.getName() : null;
}

/** Looks up a type element by its fully-qualified name. */
public TypeElement getTypeElement(String fullyQualifiedName) {
  return javacElements.getTypeElement(fullyQualifiedName);
}
}
apache/joshua
35,986
src/main/java/org/apache/joshua/decoder/ff/tm/packed/PackedGrammar.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.joshua.decoder.ff.tm.packed; /*** * This package implements Joshua's packed grammar structure, which enables the efficient loading * and accessing of grammars. It is described in the paper: * * @article{ganitkevitch2012joshua, * Author = {Ganitkevitch, J. and Cao, Y. and Weese, J. and Post, M. and Callison-Burch, C.}, * Journal = {Proceedings of WMT12}, * Title = {Joshua 4.0: Packing, PRO, and paraphrases}, * Year = {2012}} * * The packed grammar works by compiling out the grammar tries into a compact format that is loaded * and parsed directly from Java arrays. A fundamental problem is that Java arrays are indexed * by ints and not longs, meaning the maximum size of the packed grammar is about 2 GB. This forces * the use of packed grammar slices, which together constitute the grammar. The figure in the * paper above shows what each slice looks like. * * The division across slices is done in a depth-first manner. Consider the entire grammar organized * into a single source-side trie. The splits across tries are done by grouping the root-level * outgoing trie arcs --- and the entire trie beneath them --- across slices. 
* * This presents a problem: if the subtree rooted beneath a single top-level arc is too big for a * slice, the grammar can't be packed. This happens with very large Hiero grammars, for example, * where there are a *lot* of rules that start with [X]. * * A solution being worked on is to split that symbol and pack them into separate grammars with a * shared vocabulary, and then rely on Joshua's ability to query multiple grammars for rules to * solve this problem. This is not currently implemented but could be done directly in the * Grammar Packer. * * *UPDATE 10/2015* * The introduction of a SliceAggregatingTrie together with sorting the grammar by the full source string * (not just by the first source word) allows distributing rules with the same first source word * across multiple slices. * @author fhieber */ import static java.util.Collections.sort; import java.io.File; import java.io.FileInputStream; import java.io.IOException; import java.io.InputStream; import java.nio.BufferUnderflowException; import java.nio.ByteBuffer; import java.nio.IntBuffer; import java.nio.channels.FileChannel; import java.nio.channels.FileChannel.MapMode; import java.nio.file.Files; import java.nio.file.Paths; import java.security.DigestInputStream; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import java.util.Arrays; import java.util.HashMap; import java.util.Iterator; import java.util.List; import java.util.Map; import org.apache.joshua.corpus.Vocabulary; import org.apache.joshua.decoder.JoshuaConfiguration; import org.apache.joshua.decoder.ff.FeatureFunction; import org.apache.joshua.decoder.ff.FeatureVector; import org.apache.joshua.decoder.ff.tm.AbstractGrammar; import org.apache.joshua.decoder.ff.tm.BasicRuleCollection; import org.apache.joshua.decoder.ff.tm.OwnerId; import org.apache.joshua.decoder.ff.tm.Rule; import org.apache.joshua.decoder.ff.tm.RuleCollection; import org.apache.joshua.decoder.ff.tm.Trie; 
import org.apache.joshua.decoder.ff.tm.hash_based.ExtensionIterator; import org.apache.joshua.util.FormatUtils; import org.apache.joshua.util.encoding.EncoderConfiguration; import org.apache.joshua.util.encoding.FloatEncoder; import org.apache.joshua.util.io.LineReader; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import com.google.common.base.Supplier; import com.google.common.base.Suppliers; import com.google.common.cache.Cache; import com.google.common.cache.CacheBuilder; public class PackedGrammar extends AbstractGrammar { private static final Logger LOG = LoggerFactory.getLogger(PackedGrammar.class); public static final String VOCABULARY_FILENAME = "vocabulary"; private EncoderConfiguration encoding; private PackedRoot root; private ArrayList<PackedSlice> slices; private final File vocabFile; // store path to vocabulary file // A rule cache for commonly used tries to avoid excess object allocations // Testing shows there's up to ~95% hit rate when cache size is 5000 Trie nodes. private final Cache<Trie, List<Rule>> cached_rules; private final String grammarDir; private JoshuaConfiguration config; public PackedGrammar(String grammar_dir, int span_limit, String owner, String type, JoshuaConfiguration joshuaConfiguration) throws IOException { super(owner, joshuaConfiguration, span_limit); this.grammarDir = grammar_dir; this.config = joshuaConfiguration; // Read the vocabulary. vocabFile = new File(grammar_dir + File.separator + VOCABULARY_FILENAME); LOG.info("Reading vocabulary: {}", vocabFile); if (!Vocabulary.read(vocabFile)) { throw new RuntimeException("mismatches or collisions while reading on-disk vocabulary"); } // Read the config String configFile = grammar_dir + File.separator + "config"; if (new File(configFile).exists()) { LOG.info("Reading packed config: {}", configFile); readConfig(configFile); } // Read the quantizer setup. 
LOG.info("Reading encoder configuration: {}{}encoding", grammar_dir, File.separator); encoding = new EncoderConfiguration(); encoding.load(grammar_dir + File.separator + "encoding"); final List<String> listing = Arrays.asList(new File(grammar_dir).list()); sort(listing); // File.list() has arbitrary sort order slices = new ArrayList<>(); for (String prefix : listing) { if (prefix.startsWith("slice_") && prefix.endsWith(".source")) slices.add(new PackedSlice(grammar_dir + File.separator + prefix.substring(0, 11))); } long count = 0; for (PackedSlice s : slices) count += s.estimated.length; root = new PackedRoot(slices); cached_rules = CacheBuilder.newBuilder().maximumSize(joshuaConfiguration.cachedRuleSize).build(); LOG.info("Loaded {} rules", count); } @Override public Trie getTrieRoot() { return root; } @Override public boolean hasRuleForSpan(int startIndex, int endIndex, int pathLength) { return (spanLimit == -1 || pathLength <= spanLimit); } @Override public int getNumRules() { int num_rules = 0; for (PackedSlice ps : slices) num_rules += ps.featureSize; return num_rules; } @Override public int getNumDenseFeatures() { return encoding.getNumDenseFeatures(); } /** * Computes the MD5 checksum of the vocabulary file. * Can be used for comparing vocabularies across multiple packedGrammars. * @return the computed checksum */ public String computeVocabularyChecksum() { MessageDigest md; try { md = MessageDigest.getInstance("MD5"); } catch (NoSuchAlgorithmException e) { throw new RuntimeException("Unknown checksum algorithm"); } byte[] buffer = new byte[1024]; try (final InputStream is = Files.newInputStream(Paths.get(vocabFile.toString())); DigestInputStream dis = new DigestInputStream(is, md)) { while (dis.read(buffer) != -1) {} } catch (IOException e) { throw new RuntimeException("Can not find vocabulary file. 
This should not happen."); } byte[] digest = md.digest(); // convert the byte to hex format StringBuffer sb = new StringBuffer(""); for (byte aDigest : digest) { sb.append(Integer.toString((aDigest & 0xff) + 0x100, 16).substring(1)); } return sb.toString(); } /** * PackedRoot represents the root of the packed grammar trie. * Tries for different source-side firstwords are organized in * packedSlices on disk. A packedSlice can contain multiple trie * roots (i.e. multiple source-side firstwords). * The PackedRoot builds a lookup table, mapping from * source-side firstwords to the addresses in the packedSlices * that represent the subtrie for a particular firstword. * If the GrammarPacker has to distribute rules for a * source-side firstword across multiple slices, a * SliceAggregatingTrie node is created that aggregates those * tries to hide * this additional complexity from the grammar interface * This feature allows packing of grammars where the list of rules * for a single source-side firstword would exceed the maximum array * size of Java (2gb). */ public static final class PackedRoot implements Trie { private final HashMap<Integer, Trie> lookup; public PackedRoot(final List<PackedSlice> slices) { final Map<Integer, List<Trie>> childTries = collectChildTries(slices); lookup = buildLookupTable(childTries); } /** * Determines whether trie nodes for source first-words are spread over * multiple packedSlices by counting their occurrences. * @param slices * @return A mapping from first word ids to a list of trie nodes. 
*/ private Map<Integer, List<Trie>> collectChildTries(final List<PackedSlice> slices) { final Map<Integer, List<Trie>> childTries = new HashMap<>(); for (PackedSlice packedSlice : slices) { // number of tries stored in this packedSlice final int num_children = packedSlice.source[0]; for (int i = 0; i < num_children; i++) { final int id = packedSlice.source[2 * i + 1]; /* aggregate tries with same root id * obtain a Trie node, already at the correct address in the packedSlice. * In other words, the lookup index already points to the correct trie node in the packedSlice. * packedRoot.match() thus can directly return the result of lookup.get(id); */ if (!childTries.containsKey(id)) { childTries.put(id, new ArrayList<>(1)); } final Trie trie = packedSlice.root().match(id); childTries.get(id).add(trie); } } return childTries; } /** * Build a lookup table for children tries. * If the list contains only a single child node, a regular trie node * is inserted into the table; otherwise a SliceAggregatingTrie node is * created that hides this partitioning into multiple packedSlices * upstream. */ private HashMap<Integer,Trie> buildLookupTable(final Map<Integer, List<Trie>> childTries) { HashMap<Integer,Trie> lookup = new HashMap<>(childTries.size()); for (int id : childTries.keySet()) { final List<Trie> tries = childTries.get(id); if (tries.size() == 1) { lookup.put(id, tries.get(0)); } else { lookup.put(id, new SliceAggregatingTrie(tries)); } } return lookup; } @Override public Trie match(int word_id) { return lookup.get(word_id); } @Override public boolean hasExtensions() { return !lookup.isEmpty(); } @Override public HashMap<Integer, ? extends Trie> getChildren() { return lookup; } @Override public ArrayList<? 
extends Trie> getExtensions() { return new ArrayList<>(lookup.values()); } @Override public boolean hasRules() { return false; } @Override public RuleCollection getRuleCollection() { return new BasicRuleCollection(0, new int[0]); } @Override public Iterator<Integer> getTerminalExtensionIterator() { return new ExtensionIterator(lookup, true); } @Override public Iterator<Integer> getNonterminalExtensionIterator() { return new ExtensionIterator(lookup, false); } } public final class PackedSlice { private final String name; private final int[] source; private final IntBuffer target; private final ByteBuffer features; private final ByteBuffer alignments; private final int[] targetLookup; private int featureSize; private float[] estimated; private float[] precomputable; private final static int BUFFER_HEADER_POSITION = 8; /** * Provides a cache of packedTrie nodes to be used in getTrie. */ private HashMap<Integer, PackedTrie> tries; public PackedSlice(String prefix) throws IOException { name = prefix; File source_file = new File(prefix + ".source"); File target_file = new File(prefix + ".target"); File target_lookup_file = new File(prefix + ".target.lookup"); File feature_file = new File(prefix + ".features"); File alignment_file = new File(prefix + ".alignments"); source = fullyLoadFileToArray(source_file); // First int specifies the size of this file, load from 1st int on targetLookup = fullyLoadFileToArray(target_lookup_file, 1); target = associateMemoryMappedFile(target_file).asIntBuffer(); features = associateMemoryMappedFile(feature_file); initializeFeatureStructures(); if (alignment_file.exists()) { alignments = associateMemoryMappedFile(alignment_file); } else { alignments = null; } tries = new HashMap<>(); } /** * Helper function to help create all the structures which describe features * in the Slice. Only called during object construction. 
*/
private void initializeFeatureStructures() {
  // The features buffer header: int 0 = number of rule blocks, int 4 = feature size.
  int num_blocks = features.getInt(0);
  estimated = new float[num_blocks];
  precomputable = new float[num_blocks];
  // NEGATIVE_INFINITY marks "not yet computed" per rule block.
  Arrays.fill(estimated, Float.NEGATIVE_INFINITY);
  Arrays.fill(precomputable, Float.NEGATIVE_INFINITY);
  featureSize = features.getInt(4);
}

// Reads the int at logical index `position` in a block-index table that
// starts after the buffer header.
private int getIntFromByteBuffer(int position, ByteBuffer buffer) {
  return buffer.getInt(BUFFER_HEADER_POSITION + (4 * position));
}

private int[] fullyLoadFileToArray(File file) throws IOException {
  return fullyLoadFileToArray(file, 0);
}

/**
 * This function will use a bulk loading method to fully populate a target
 * array from file.
 *
 * @param file
 *          File that will be read from disk.
 * @param startIndex
 *          an offset into the read file.
 * @return an int array of size length(file) - offset containing ints in the
 *         file.
 * @throws IOException
 */
private int[] fullyLoadFileToArray(File file, int startIndex) throws IOException {
  IntBuffer buffer = associateMemoryMappedFile(file).asIntBuffer();
  int size = (int) (file.length() - (4 * startIndex))/4;
  int[] result = new int[size];
  buffer.position(startIndex);
  buffer.get(result, 0, size);
  return result;
}

// Memory-maps the whole file read-only; the channel may be closed once the
// mapping exists, so try-with-resources on the stream is safe.
private ByteBuffer associateMemoryMappedFile(File file) throws IOException {
  try(FileInputStream fileInputStream = new FileInputStream(file)) {
    FileChannel fileChannel = fileInputStream.getChannel();
    int size = (int) fileChannel.size();
    return fileChannel.map(MapMode.READ_ONLY, 0, size);
  }
}

/**
 * Reconstructs a target-side token sequence by following parent pointers in
 * the packed target trie, starting from {@code pointer}.
 */
private int[] getTarget(int pointer) {
  // Figure out level.
  // NOTE(review): the bound `tgt_length < (targetLookup.length + 1)` permits
  // tgt_length == targetLookup.length before the array access on the right of
  // the &&, which would throw ArrayIndexOutOfBounds unless the packed data
  // guarantees the loop always exits earlier — TODO confirm against the packer.
  int tgt_length = 1;
  while (tgt_length < (targetLookup.length + 1) && targetLookup[tgt_length] <= pointer)
    tgt_length++;
  int[] tgt = new int[tgt_length];
  int index = 0;
  int parent;
  do {
    parent = target.get(pointer);
    if (parent != -1)
      tgt[index++] = target.get(pointer + 1);
    pointer = parent;
  } while (pointer != -1);
  return tgt;
}

// Returns the cached PackedTrie for this node address, creating it on first use.
private synchronized PackedTrie getTrie(final int node_address) {
  PackedTrie t = tries.get(node_address);
  if (t == null) {
    t = new PackedTrie(node_address);
    tries.put(node_address, t);
  }
  return t;
}

// Variant that also records the source-side path leading to the node.
private synchronized PackedTrie getTrie(int node_address, int[] parent_src, int parent_arity,
    int symbol) {
  PackedTrie t = tries.get(node_address);
  if (t == null) {
    t = new PackedTrie(node_address, parent_src, parent_arity, symbol);
    tries.put(node_address, t);
  }
  return t;
}

/**
 * Returns the FeatureVector associated with a rule (represented as a block ID).
 * These features are in the form "feature1=value feature2=value...". By default, unlabeled
 * features are named using the pattern.
 * @param block_id
 * @return feature vector
 */
private FeatureVector loadFeatureVector(int block_id) {
  int featurePosition = getIntFromByteBuffer(block_id, features);
  final int numFeatures = encoding.readId(features, featurePosition);
  featurePosition += EncoderConfiguration.ID_SIZE;
  final FeatureVector featureVector = new FeatureVector();
  FloatEncoder encoder;
  String featureName;
  for (int i = 0; i < numFeatures; i++) {
    final int innerId = encoding.readId(features, featurePosition);
    final int outerId = encoding.outerId(innerId);
    encoder = encoding.encoder(innerId);
    // TODO (fhieber): why on earth are dense feature ids (ints) encoded in the vocabulary?
    featureName = Vocabulary.word(outerId);
    final float value = encoder.read(features, featurePosition);
    try {
      // NOTE(review): dense (numeric) features are negated here while sparse
      // (named) features below are not — presumably dense values are stored as
      // costs; confirm against the grammar packer before changing.
      int index = Integer.parseInt(featureName);
      featureVector.increment(index, -value);
    } catch (NumberFormatException e) {
      featureVector.increment(featureName, value);
    }
    featurePosition += EncoderConfiguration.ID_SIZE + encoder.size();
  }
  return featureVector;
}

/**
 * We need to synchronize this method as there is a many to one ratio between
 * PackedRule/PhrasePair and this class (PackedSlice). This means during concurrent first
 * getAlignments calls to PackedRule objects they could alter each other's positions within the
 * buffer before calling read on the buffer.
 */
private synchronized byte[] getAlignmentArray(int block_id) {
  if (alignments == null)
    throw new RuntimeException("No alignments available.");
  int alignment_position = getIntFromByteBuffer(block_id, alignments);
  int num_points = alignments.get(alignment_position);
  // Each alignment point is a (source, target) byte pair.
  byte[] alignment = new byte[num_points * 2];
  alignments.position(alignment_position + 1);
  try {
    alignments.get(alignment, 0, num_points * 2);
  } catch (BufferUnderflowException bue) {
    // Dump buffer state before rethrowing to aid debugging of corrupt files.
    LOG.warn("Had an exception when accessing alignment mapped byte buffer");
    LOG.warn("Attempting to access alignments at position: {}", alignment_position + 1);
    LOG.warn("And to read this many bytes: {}", num_points * 2);
    LOG.warn("Buffer capacity is : {}", alignments.capacity());
    LOG.warn("Buffer position is : {}", alignments.position());
    LOG.warn("Buffer limit is : {}", alignments.limit());
    throw bue;
  }
  return alignment;
}

// The trie root of this slice always lives at node address 0.
private PackedTrie root() {
  return getTrie(0);
}

@Override
public String toString() {
  return name;
}

/**
 * A trie node within the grammar slice. Identified by its position within the source array,
 * and, as a supplement, the source string leading from the trie root to the node.
* * @author jg * */ public class PackedTrie implements Trie, RuleCollection { private final int position; private boolean sorted = false; private final int[] src; private int arity; private PackedTrie(int position) { this.position = position; src = new int[0]; arity = 0; } private PackedTrie(int position, int[] parent_src, int parent_arity, int symbol) { this.position = position; src = new int[parent_src.length + 1]; System.arraycopy(parent_src, 0, src, 0, parent_src.length); src[src.length - 1] = symbol; arity = parent_arity; if (FormatUtils.isNonterminal(symbol)) arity++; } @Override public final Trie match(int token_id) { int num_children = source[position]; if (num_children == 0) return null; if (num_children == 1 && token_id == source[position + 1]) return getTrie(source[position + 2], src, arity, token_id); int top = 0; int bottom = num_children - 1; while (true) { int candidate = (top + bottom) / 2; int candidate_position = position + 1 + 2 * candidate; int read_token = source[candidate_position]; if (read_token == token_id) { return getTrie(source[candidate_position + 1], src, arity, token_id); } else if (top == bottom) { return null; } else if (read_token > token_id) { top = candidate + 1; } else { bottom = candidate - 1; } if (bottom < top) return null; } } @Override public HashMap<Integer, ? extends Trie> getChildren() { HashMap<Integer, Trie> children = new HashMap<>(); int num_children = source[position]; for (int i = 0; i < num_children; i++) { int symbol = source[position + 1 + 2 * i]; int address = source[position + 2 + 2 * i]; children.put(symbol, getTrie(address, src, arity, symbol)); } return children; } @Override public boolean hasExtensions() { return (source[position] != 0); } @Override public ArrayList<? 
extends Trie> getExtensions() { int num_children = source[position]; ArrayList<PackedTrie> tries = new ArrayList<>(num_children); for (int i = 0; i < num_children; i++) { int symbol = source[position + 1 + 2 * i]; int address = source[position + 2 + 2 * i]; tries.add(getTrie(address, src, arity, symbol)); } return tries; } @Override public boolean hasRules() { int num_children = source[position]; return (source[position + 1 + 2 * num_children] != 0); } @Override public RuleCollection getRuleCollection() { return this; } @Override public List<Rule> getRules() { List<Rule> rules = cached_rules.getIfPresent(this); if (rules != null) { return rules; } int num_children = source[position]; int rule_position = position + 2 * (num_children + 1); int num_rules = source[rule_position - 1]; rules = new ArrayList<>(num_rules); for (int i = 0; i < num_rules; i++) { rules.add(new PackedRule(rule_position + 3 * i)); } cached_rules.put(this, rules); return rules; } /** * We determine if the Trie is sorted by checking if the estimated cost of the first rule in * the trie has been set. 
*/ @Override public boolean isSorted() { return sorted; } private synchronized void sortRules(List<FeatureFunction> models) { int num_children = source[position]; int rule_position = position + 2 * (num_children + 1); int num_rules = source[rule_position - 1]; if (num_rules == 0) { this.sorted = true; return; } Integer[] rules = new Integer[num_rules]; int target_address; int block_id; for (int i = 0; i < num_rules; ++i) { target_address = source[rule_position + 1 + 3 * i]; rules[i] = rule_position + 2 + 3 * i; block_id = source[rules[i]]; Rule rule = new Rule(source[rule_position + 3 * i], src, getTarget(target_address), loadFeatureVector(block_id), arity, owner); estimated[block_id] = rule.estimateRuleCost(models); precomputable[block_id] = rule.getPrecomputableCost(); } Arrays.sort(rules, (a, b) -> { float a_cost = estimated[source[a]]; float b_cost = estimated[source[b]]; if (a_cost == b_cost) return 0; return (a_cost > b_cost ? -1 : 1); }); int[] sorted = new int[3 * num_rules]; int j = 0; for (Integer address : rules) { sorted[j++] = source[address - 2]; sorted[j++] = source[address - 1]; sorted[j++] = source[address]; } System.arraycopy(sorted, 0, source, rule_position + 0, sorted.length); // Replace rules in cache with their sorted values on next getRules() cached_rules.invalidate(this); this.sorted = true; } @Override public List<Rule> getSortedRules(List<FeatureFunction> featureFunctions) { if (!isSorted()) sortRules(featureFunctions); return getRules(); } @Override public int[] getSourceSide() { return src; } @Override public int getArity() { return arity; } @Override public Iterator<Integer> getTerminalExtensionIterator() { return new PackedChildIterator(position, true); } @Override public Iterator<Integer> getNonterminalExtensionIterator() { return new PackedChildIterator(position, false); } public final class PackedChildIterator implements Iterator<Integer> { private int current; private final boolean terminal; private boolean done; private int last; 
PackedChildIterator(int position, boolean terminal) { this.terminal = terminal; int num_children = source[position]; done = (num_children == 0); if (!done) { current = (terminal ? position + 1 : position - 1 + 2 * num_children); last = (terminal ? position - 1 + 2 * num_children : position + 1); } } @Override public boolean hasNext() { if (done) return false; int next = (terminal ? current + 2 : current - 2); if (next == last) return false; return (terminal ? source[next] > 0 : source[next] < 0); } @Override public Integer next() { if (done) throw new RuntimeException("No more symbols!"); int symbol = source[current]; if (current == last) done = true; if (!done) { current = (terminal ? current + 2 : current - 2); done = (terminal ? source[current] < 0 : source[current] > 0); } return symbol; } @Override public void remove() { throw new UnsupportedOperationException(); } } /** * A packed phrase pair represents a rule of the form of a phrase pair, packed with the * grammar-packer.pl script, which simply adds a nonterminal [X] to the left-hand side of * all phrase pairs (and converts the Moses features). The packer then packs these. We have * to then put a nonterminal on the source and target sides to treat the phrase pairs like * left-branching rules, which is how Joshua deals with phrase decoding. * * @author Matt Post post@cs.jhu.edu * */ public final class PackedPhrasePair extends PackedRule { private final Supplier<int[]> englishSupplier; private final Supplier<byte[]> alignmentSupplier; public PackedPhrasePair(int address) { super(address); englishSupplier = initializeEnglishSupplier(); alignmentSupplier = initializeAlignmentSupplier(); } @Override public int getArity() { return PackedTrie.this.getArity() + 1; } /** * Initialize a number of suppliers which get evaluated when their respective getters * are called. * Inner lambda functions are guaranteed to only be called once, because of this underlying * structures are accessed in a threadsafe way. 
* Guava's implementation makes sure only one read of a volatile variable occurs per get. * This means this implementation should be as thread-safe and performant as possible. */ private Supplier<int[]> initializeEnglishSupplier(){ return Suppliers.memoize(() ->{ int[] phrase = getTarget(source[address + 1]); int[] tgt = new int[phrase.length + 1]; tgt[0] = -1; for (int i = 0; i < phrase.length; i++) tgt[i+1] = phrase[i]; return tgt; }); } private Supplier<byte[]> initializeAlignmentSupplier(){ return Suppliers.memoize(() ->{ byte[] raw_alignment = getAlignmentArray(source[address + 2]); byte[] points = new byte[raw_alignment.length + 2]; points[0] = points[1] = 0; for (int i = 0; i < raw_alignment.length; i++) points[i + 2] = (byte) (raw_alignment[i] + 1); return points; }); } /** * Take the English phrase of the underlying rule and prepend an [X]. * * @return the augmented phrase */ @Override public int[] getEnglish() { return this.englishSupplier.get(); } /** * Take the French phrase of the underlying rule and prepend an [X]. * * @return the augmented French phrase */ @Override public int[] getFrench() { int phrase[] = new int[src.length + 1]; int ntid = Vocabulary.id(PackedGrammar.this.joshuaConfiguration.default_non_terminal); phrase[0] = ntid; System.arraycopy(src, 0, phrase, 1, src.length); return phrase; } /** * Similarly the alignment array needs to be shifted over by one. 
* * @return the byte[] alignment */ @Override public byte[] getAlignment() { // if no alignments in grammar do not fail if (alignments == null) { return null; } return this.alignmentSupplier.get(); } } public class PackedRule extends Rule { protected final int address; private final Supplier<int[]> englishSupplier; private final Supplier<FeatureVector> featureVectorSupplier; private final Supplier<byte[]> alignmentsSupplier; public PackedRule(int address) { this.address = address; this.englishSupplier = intializeEnglishSupplier(); this.featureVectorSupplier = initializeFeatureVectorSupplier(); this.alignmentsSupplier = initializeAlignmentsSupplier(); } private Supplier<int[]> intializeEnglishSupplier(){ return Suppliers.memoize(() ->{ return getTarget(source[address + 1]); }); } private Supplier<FeatureVector> initializeFeatureVectorSupplier(){ return Suppliers.memoize(() ->{ return loadFeatureVector(source[address + 2]); }); } private Supplier<byte[]> initializeAlignmentsSupplier(){ return Suppliers.memoize(()->{ // if no alignments in grammar do not fail if (alignments == null){ return null; } return getAlignmentArray(source[address + 2]); }); } @Override public void setArity(int arity) { } @Override public int getArity() { return PackedTrie.this.getArity(); } @Override public void setOwner(OwnerId owner) { } @Override public OwnerId getOwner() { return owner; } @Override public void setLHS(int lhs) { } @Override public int getLHS() { return source[address]; } @Override public void setEnglish(int[] eng) { } @Override public int[] getEnglish() { return this.englishSupplier.get(); } @Override public void setFrench(int[] french) { } @Override public int[] getFrench() { return src; } @Override public FeatureVector getFeatureVector() { return this.featureVectorSupplier.get(); } @Override public byte[] getAlignment() { return this.alignmentsSupplier.get(); } @Override public String getAlignmentString() { throw new RuntimeException("AlignmentString not implemented for 
PackedRule!"); } @Override public float getEstimatedCost() { return estimated[source[address + 2]]; } // @Override // public void setPrecomputableCost(float cost) { // precomputable[source[address + 2]] = cost; // } @Override public float getPrecomputableCost() { return precomputable[source[address + 2]]; } @Override public float estimateRuleCost(List<FeatureFunction> models) { return estimated[source[address + 2]]; } @Override public String toString() { String sb = Vocabulary.word(this.getLHS()) + " ||| " + getFrenchWords() + " ||| " + getEnglishWords() + " |||" + " " + getFeatureVector() + String.format(" ||| %.3f", getEstimatedCost()); return sb; } } } } @Override public void addOOVRules(int word, List<FeatureFunction> featureFunctions) { throw new RuntimeException("PackedGrammar.addOOVRules(): I can't add OOV rules"); } @Override public void addRule(Rule rule) { throw new RuntimeException("PackedGrammar.addRule(): I can't add rules"); } @Override public void save() { throw new RuntimeException("PackedGrammar.save(): I can't be saved"); } /** * Read the config file * * TODO: this should be rewritten using typeconfig. * * @param config * @throws IOException */ private void readConfig(String config) throws IOException { int version = 2; for (String line: new LineReader(config)) { String[] tokens = line.split(" = "); if (tokens[0].equals("max-source-len")) this.maxSourcePhraseLength = Integer.parseInt(tokens[1]); else if (tokens[0].equals("version")) { version = Integer.parseInt(tokens[1]); } } if (! isSupportedVersion(version)) { String message = String.format("The grammar at %s was packed with packer version %d, which is incompatible with the current config", this.grammarDir, version); throw new RuntimeException(message); } } /* * Determines whether the current grammar is a supported version. For hierarchical decoding, * no changes have occurred, so any version past 2 (the default) is supported. For phrase- * based decoding, version 4 is required. 
*/ private boolean isSupportedVersion(int version) { return (config.search_algorithm.equals("cky") && version >= 2) || (version >= 4); } }
google/filament
35,973
third_party/dawn/third_party/protobuf/java/core/src/main/java/com/google/protobuf/CodedOutputStreamWriter.java
// Protocol Buffers - Google's data interchange format // Copyright 2008 Google Inc. All rights reserved. // // Use of this source code is governed by a BSD-style // license that can be found in the LICENSE file or at // https://developers.google.com/open-source/licenses/bsd package com.google.protobuf; import static com.google.protobuf.Internal.checkNotNull; import static com.google.protobuf.WireFormat.WIRETYPE_LENGTH_DELIMITED; import java.io.IOException; import java.util.Arrays; import java.util.List; import java.util.Map; /** An adapter between the {@link Writer} interface and {@link CodedOutputStream}. */ @CheckReturnValue @ExperimentalApi final class CodedOutputStreamWriter implements Writer { private final CodedOutputStream output; public static CodedOutputStreamWriter forCodedOutput(CodedOutputStream output) { if (output.wrapper != null) { return output.wrapper; } return new CodedOutputStreamWriter(output); } private CodedOutputStreamWriter(CodedOutputStream output) { this.output = checkNotNull(output, "output"); this.output.wrapper = this; } @Override public FieldOrder fieldOrder() { return FieldOrder.ASCENDING; } public int getTotalBytesWritten() { return output.getTotalBytesWritten(); } @Override public void writeSFixed32(int fieldNumber, int value) throws IOException { output.writeSFixed32(fieldNumber, value); } @Override public void writeInt64(int fieldNumber, long value) throws IOException { output.writeInt64(fieldNumber, value); } @Override public void writeSFixed64(int fieldNumber, long value) throws IOException { output.writeSFixed64(fieldNumber, value); } @Override public void writeFloat(int fieldNumber, float value) throws IOException { output.writeFloat(fieldNumber, value); } @Override public void writeDouble(int fieldNumber, double value) throws IOException { output.writeDouble(fieldNumber, value); } @Override public void writeEnum(int fieldNumber, int value) throws IOException { output.writeEnum(fieldNumber, value); } @Override public void 
writeUInt64(int fieldNumber, long value) throws IOException { output.writeUInt64(fieldNumber, value); } @Override public void writeInt32(int fieldNumber, int value) throws IOException { output.writeInt32(fieldNumber, value); } @Override public void writeFixed64(int fieldNumber, long value) throws IOException { output.writeFixed64(fieldNumber, value); } @Override public void writeFixed32(int fieldNumber, int value) throws IOException { output.writeFixed32(fieldNumber, value); } @Override public void writeBool(int fieldNumber, boolean value) throws IOException { output.writeBool(fieldNumber, value); } @Override public void writeString(int fieldNumber, String value) throws IOException { output.writeString(fieldNumber, value); } @Override public void writeBytes(int fieldNumber, ByteString value) throws IOException { output.writeBytes(fieldNumber, value); } @Override public void writeUInt32(int fieldNumber, int value) throws IOException { output.writeUInt32(fieldNumber, value); } @Override public void writeSInt32(int fieldNumber, int value) throws IOException { output.writeSInt32(fieldNumber, value); } @Override public void writeSInt64(int fieldNumber, long value) throws IOException { output.writeSInt64(fieldNumber, value); } @Override public void writeMessage(int fieldNumber, Object value) throws IOException { output.writeMessage(fieldNumber, (MessageLite) value); } @Override public void writeMessage(int fieldNumber, Object value, Schema schema) throws IOException { output.writeMessage(fieldNumber, (MessageLite) value, schema); } @Deprecated @Override public void writeGroup(int fieldNumber, Object value) throws IOException { output.writeGroup(fieldNumber, (MessageLite) value); } @Override public void writeGroup(int fieldNumber, Object value, Schema schema) throws IOException { output.writeGroup(fieldNumber, (MessageLite) value, schema); } @Deprecated @Override public void writeStartGroup(int fieldNumber) throws IOException { output.writeTag(fieldNumber, 
WireFormat.WIRETYPE_START_GROUP); } @Deprecated @Override public void writeEndGroup(int fieldNumber) throws IOException { output.writeTag(fieldNumber, WireFormat.WIRETYPE_END_GROUP); } @Override public final void writeMessageSetItem(int fieldNumber, Object value) throws IOException { if (value instanceof ByteString) { output.writeRawMessageSetExtension(fieldNumber, (ByteString) value); } else { output.writeMessageSetExtension(fieldNumber, (MessageLite) value); } } @Override public void writeInt32List(int fieldNumber, List<Integer> value, boolean packed) throws IOException { if (value instanceof IntArrayList) { writeInt32ListInternal(fieldNumber, (IntArrayList) value, packed); } else { writeInt32ListInternal(fieldNumber, value, packed); } } private void writeInt32ListInternal(int fieldNumber, IntArrayList value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeInt32SizeNoTag(value.getInt(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. for (int i = 0; i < value.size(); ++i) { output.writeInt32NoTag(value.getInt(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeInt32(fieldNumber, value.getInt(i)); } } } private void writeInt32ListInternal(int fieldNumber, List<Integer> value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeInt32SizeNoTag(value.get(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. 
for (int i = 0; i < value.size(); ++i) { output.writeInt32NoTag(value.get(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeInt32(fieldNumber, value.get(i)); } } } @Override public void writeFixed32List(int fieldNumber, List<Integer> value, boolean packed) throws IOException { if (value instanceof IntArrayList) { writeFixed32ListInternal(fieldNumber, (IntArrayList) value, packed); } else { writeFixed32ListInternal(fieldNumber, value, packed); } } private void writeFixed32ListInternal(int fieldNumber, IntArrayList value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeFixed32SizeNoTag(value.getInt(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. for (int i = 0; i < value.size(); ++i) { output.writeFixed32NoTag(value.getInt(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeFixed32(fieldNumber, value.getInt(i)); } } } private void writeFixed32ListInternal(int fieldNumber, List<Integer> value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeFixed32SizeNoTag(value.get(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. 
for (int i = 0; i < value.size(); ++i) { output.writeFixed32NoTag(value.get(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeFixed32(fieldNumber, value.get(i)); } } } @Override public void writeInt64List(int fieldNumber, List<Long> value, boolean packed) throws IOException { if (value instanceof LongArrayList) { writeInt64ListInternal(fieldNumber, (LongArrayList) value, packed); } else { writeInt64ListInternal(fieldNumber, value, packed); } } private void writeInt64ListInternal(int fieldNumber, LongArrayList value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeInt64SizeNoTag(value.getLong(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. for (int i = 0; i < value.size(); ++i) { output.writeInt64NoTag(value.getLong(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeInt64(fieldNumber, value.getLong(i)); } } } private void writeInt64ListInternal(int fieldNumber, List<Long> value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeInt64SizeNoTag(value.get(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. 
for (int i = 0; i < value.size(); ++i) { output.writeInt64NoTag(value.get(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeInt64(fieldNumber, value.get(i)); } } } @Override public void writeUInt64List(int fieldNumber, List<Long> value, boolean packed) throws IOException { if (value instanceof LongArrayList) { writeUInt64ListInternal(fieldNumber, (LongArrayList) value, packed); } else { writeUInt64ListInternal(fieldNumber, value, packed); } } private void writeUInt64ListInternal(int fieldNumber, LongArrayList value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeUInt64SizeNoTag(value.getLong(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. for (int i = 0; i < value.size(); ++i) { output.writeUInt64NoTag(value.getLong(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeUInt64(fieldNumber, value.getLong(i)); } } } private void writeUInt64ListInternal(int fieldNumber, List<Long> value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeUInt64SizeNoTag(value.get(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. 
for (int i = 0; i < value.size(); ++i) { output.writeUInt64NoTag(value.get(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeUInt64(fieldNumber, value.get(i)); } } } @Override public void writeFixed64List(int fieldNumber, List<Long> value, boolean packed) throws IOException { if (value instanceof LongArrayList) { writeFixed64ListInternal(fieldNumber, (LongArrayList) value, packed); } else { writeFixed64ListInternal(fieldNumber, value, packed); } } private void writeFixed64ListInternal(int fieldNumber, LongArrayList value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeFixed64SizeNoTag(value.getLong(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. for (int i = 0; i < value.size(); ++i) { output.writeFixed64NoTag(value.getLong(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeFixed64(fieldNumber, value.getLong(i)); } } } private void writeFixed64ListInternal(int fieldNumber, List<Long> value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeFixed64SizeNoTag(value.get(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. 
for (int i = 0; i < value.size(); ++i) { output.writeFixed64NoTag(value.get(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeFixed64(fieldNumber, value.get(i)); } } } @Override public void writeFloatList(int fieldNumber, List<Float> value, boolean packed) throws IOException { if (value instanceof FloatArrayList) { writeFloatListInternal(fieldNumber, (FloatArrayList) value, packed); } else { writeFloatListInternal(fieldNumber, value, packed); } } private void writeFloatListInternal(int fieldNumber, FloatArrayList value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeFloatSizeNoTag(value.getFloat(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. for (int i = 0; i < value.size(); ++i) { output.writeFloatNoTag(value.getFloat(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeFloat(fieldNumber, value.getFloat(i)); } } } private void writeFloatListInternal(int fieldNumber, List<Float> value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeFloatSizeNoTag(value.get(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. 
// Tail of writeFloatListInternal(List<Float>) — the method header is above this
// chunk; this completes its packed/unpacked emission branches.
for (int i = 0; i < value.size(); ++i) {
  output.writeFloatNoTag(value.get(i));
}
} else {
  for (int i = 0; i < value.size(); ++i) {
    output.writeFloat(fieldNumber, value.get(i));
  }
}
}

/**
 * Writes a repeated double field, dispatching to the boxing-free overload when
 * the list is the primitive-backed {@code DoubleArrayList}.
 */
@Override
public void writeDoubleList(int fieldNumber, List<Double> value, boolean packed)
    throws IOException {
  if (value instanceof DoubleArrayList) {
    writeDoubleListInternal(fieldNumber, (DoubleArrayList) value, packed);
  } else {
    writeDoubleListInternal(fieldNumber, value, packed);
  }
}

// Primitive fast path: reads elements via getDouble(i) to avoid autoboxing.
private void writeDoubleListInternal(int fieldNumber, DoubleArrayList value, boolean packed)
    throws IOException {
  if (packed) {
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    // Compute and write the length of the data.
    int dataSize = 0;
    for (int i = 0; i < value.size(); ++i) {
      dataSize += CodedOutputStream.computeDoubleSizeNoTag(value.getDouble(i));
    }
    output.writeUInt32NoTag(dataSize);
    // Write the data itself, without any tags.
    for (int i = 0; i < value.size(); ++i) {
      output.writeDoubleNoTag(value.getDouble(i));
    }
  } else {
    for (int i = 0; i < value.size(); ++i) {
      output.writeDouble(fieldNumber, value.getDouble(i));
    }
  }
}

// Generic (boxed) path for arbitrary List<Double> implementations.
private void writeDoubleListInternal(int fieldNumber, List<Double> value, boolean packed)
    throws IOException {
  if (packed) {
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    // Compute and write the length of the data.
    int dataSize = 0;
    for (int i = 0; i < value.size(); ++i) {
      dataSize += CodedOutputStream.computeDoubleSizeNoTag(value.get(i));
    }
    output.writeUInt32NoTag(dataSize);
    // Write the data itself, without any tags.
    for (int i = 0; i < value.size(); ++i) {
      output.writeDoubleNoTag(value.get(i));
    }
  } else {
    for (int i = 0; i < value.size(); ++i) {
      output.writeDouble(fieldNumber, value.get(i));
    }
  }
}

/** Writes a repeated enum field; enum values travel on the wire as varints. */
@Override
public void writeEnumList(int fieldNumber, List<Integer> value, boolean packed)
    throws IOException {
  if (value instanceof IntArrayList) {
    writeEnumListInternal(fieldNumber, (IntArrayList) value, packed);
  } else {
    writeEnumListInternal(fieldNumber, value, packed);
  }
}

// Primitive fast path (enum numbers stored as ints).
private void writeEnumListInternal(int fieldNumber, IntArrayList value, boolean packed)
    throws IOException {
  if (packed) {
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    // Compute and write the length of the data.
    int dataSize = 0;
    for (int i = 0; i < value.size(); ++i) {
      dataSize += CodedOutputStream.computeEnumSizeNoTag(value.getInt(i));
    }
    output.writeUInt32NoTag(dataSize);
    // Write the data itself, without any tags.
    for (int i = 0; i < value.size(); ++i) {
      output.writeEnumNoTag(value.getInt(i));
    }
  } else {
    for (int i = 0; i < value.size(); ++i) {
      output.writeEnum(fieldNumber, value.getInt(i));
    }
  }
}

// Generic (boxed) path.
private void writeEnumListInternal(int fieldNumber, List<Integer> value, boolean packed)
    throws IOException {
  if (packed) {
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    // Compute and write the length of the data.
    int dataSize = 0;
    for (int i = 0; i < value.size(); ++i) {
      dataSize += CodedOutputStream.computeEnumSizeNoTag(value.get(i));
    }
    output.writeUInt32NoTag(dataSize);
    // Write the data itself, without any tags.
    for (int i = 0; i < value.size(); ++i) {
      output.writeEnumNoTag(value.get(i));
    }
  } else {
    for (int i = 0; i < value.size(); ++i) {
      output.writeEnum(fieldNumber, value.get(i));
    }
  }
}

/** Writes a repeated bool field. */
@Override
public void writeBoolList(int fieldNumber, List<Boolean> value, boolean packed)
    throws IOException {
  if (value instanceof BooleanArrayList) {
    writeBoolListInternal(fieldNumber, (BooleanArrayList) value, packed);
  } else {
    writeBoolListInternal(fieldNumber, value, packed);
  }
}

// Primitive fast path: getBoolean(i) avoids autoboxing.
private void writeBoolListInternal(int fieldNumber, BooleanArrayList value, boolean packed)
    throws IOException {
  if (packed) {
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    // Compute and write the length of the data.
    int dataSize = 0;
    for (int i = 0; i < value.size(); ++i) {
      dataSize += CodedOutputStream.computeBoolSizeNoTag(value.getBoolean(i));
    }
    output.writeUInt32NoTag(dataSize);
    // Write the data itself, without any tags.
    for (int i = 0; i < value.size(); ++i) {
      output.writeBoolNoTag(value.getBoolean(i));
    }
  } else {
    for (int i = 0; i < value.size(); ++i) {
      output.writeBool(fieldNumber, value.getBoolean(i));
    }
  }
}

// Generic (boxed) path.
private void writeBoolListInternal(int fieldNumber, List<Boolean> value, boolean packed)
    throws IOException {
  if (packed) {
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    // Compute and write the length of the data.
    int dataSize = 0;
    for (int i = 0; i < value.size(); ++i) {
      dataSize += CodedOutputStream.computeBoolSizeNoTag(value.get(i));
    }
    output.writeUInt32NoTag(dataSize);
    // Write the data itself, without any tags.
    for (int i = 0; i < value.size(); ++i) {
      output.writeBoolNoTag(value.get(i));
    }
  } else {
    for (int i = 0; i < value.size(); ++i) {
      output.writeBool(fieldNumber, value.get(i));
    }
  }
}

/**
 * Writes a repeated string field. LazyStringList elements may still be raw
 * ByteString; getRaw(i) lets us forward them without forcing UTF-8 decoding.
 */
@Override
public void writeStringList(int fieldNumber, List<String> value) throws IOException {
  if (value instanceof LazyStringList) {
    final LazyStringList lazyList = (LazyStringList) value;
    for (int i = 0; i < value.size(); ++i) {
      writeLazyString(fieldNumber, lazyList.getRaw(i));
    }
  } else {
    for (int i = 0; i < value.size(); ++i) {
      output.writeString(fieldNumber, value.get(i));
    }
  }
}

// A lazy element is either an already-decoded String or a raw ByteString.
private void writeLazyString(int fieldNumber, Object value) throws IOException {
  if (value instanceof String) {
    output.writeString(fieldNumber, (String) value);
  } else {
    output.writeBytes(fieldNumber, (ByteString) value);
  }
}

/** Writes a repeated bytes field; bytes fields are never packed. */
@Override
public void writeBytesList(int fieldNumber, List<ByteString> value) throws IOException {
  for (int i = 0; i < value.size(); ++i) {
    output.writeBytes(fieldNumber, value.get(i));
  }
}

/** Writes a repeated uint32 field. */
@Override
public void writeUInt32List(int fieldNumber, List<Integer> value, boolean packed)
    throws IOException {
  if (value instanceof IntArrayList) {
    writeUInt32ListInternal(fieldNumber, (IntArrayList) value, packed);
  } else {
    writeUInt32ListInternal(fieldNumber, value, packed);
  }
}

// Primitive fast path; continues onto the next chunk.
private void writeUInt32ListInternal(int fieldNumber, IntArrayList value, boolean packed)
    throws IOException {
  if (packed) {
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    // Compute and write the length of the data.
    int dataSize = 0;
    for (int i = 0; i < value.size(); ++i) {
      dataSize += CodedOutputStream.computeUInt32SizeNoTag(value.getInt(i));
    }
    output.writeUInt32NoTag(dataSize);
    // Write the data itself, without any tags.
for (int i = 0; i < value.size(); ++i) { output.writeUInt32NoTag(value.getInt(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeUInt32(fieldNumber, value.getInt(i)); } } } public void writeUInt32ListInternal(int fieldNumber, List<Integer> value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeUInt32SizeNoTag(value.get(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. for (int i = 0; i < value.size(); ++i) { output.writeUInt32NoTag(value.get(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeUInt32(fieldNumber, value.get(i)); } } } @Override public void writeSFixed32List(int fieldNumber, List<Integer> value, boolean packed) throws IOException { if (value instanceof IntArrayList) { writeSFixed32ListInternal(fieldNumber, (IntArrayList) value, packed); } else { writeSFixed32ListInternal(fieldNumber, value, packed); } } private void writeSFixed32ListInternal(int fieldNumber, IntArrayList value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeSFixed32SizeNoTag(value.getInt(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. for (int i = 0; i < value.size(); ++i) { output.writeSFixed32NoTag(value.getInt(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeSFixed32(fieldNumber, value.getInt(i)); } } } private void writeSFixed32ListInternal(int fieldNumber, List<Integer> value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. 
// Tail of writeSFixed32ListInternal(List<Integer>): packed branch continues here.
int dataSize = 0;
for (int i = 0; i < value.size(); ++i) {
  dataSize += CodedOutputStream.computeSFixed32SizeNoTag(value.get(i));
}
output.writeUInt32NoTag(dataSize);
// Write the data itself, without any tags.
for (int i = 0; i < value.size(); ++i) {
  output.writeSFixed32NoTag(value.get(i));
}
} else {
  for (int i = 0; i < value.size(); ++i) {
    output.writeSFixed32(fieldNumber, value.get(i));
  }
}
}

/** Writes a repeated sfixed64 field, using the primitive path when possible. */
@Override
public void writeSFixed64List(int fieldNumber, List<Long> value, boolean packed)
    throws IOException {
  if (value instanceof LongArrayList) {
    writeSFixed64ListInternal(fieldNumber, (LongArrayList) value, packed);
  } else {
    writeSFixed64ListInternal(fieldNumber, value, packed);
  }
}

// Primitive fast path: getLong(i) avoids autoboxing.
private void writeSFixed64ListInternal(int fieldNumber, LongArrayList value, boolean packed)
    throws IOException {
  if (packed) {
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    // Compute and write the length of the data.
    int dataSize = 0;
    for (int i = 0; i < value.size(); ++i) {
      dataSize += CodedOutputStream.computeSFixed64SizeNoTag(value.getLong(i));
    }
    output.writeUInt32NoTag(dataSize);
    // Write the data itself, without any tags.
    for (int i = 0; i < value.size(); ++i) {
      output.writeSFixed64NoTag(value.getLong(i));
    }
  } else {
    for (int i = 0; i < value.size(); ++i) {
      output.writeSFixed64(fieldNumber, value.getLong(i));
    }
  }
}

// Generic (boxed) path; body continues onto the next chunk.
private void writeSFixed64ListInternal(int fieldNumber, List<Long> value, boolean packed)
    throws IOException {
  if (packed) {
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    // Compute and write the length of the data.
    int dataSize = 0;
    for (int i = 0; i < value.size(); ++i) {
      dataSize += CodedOutputStream.computeSFixed64SizeNoTag(value.get(i));
    }
    output.writeUInt32NoTag(dataSize);
    // Write the data itself, without any tags.
for (int i = 0; i < value.size(); ++i) { output.writeSFixed64NoTag(value.get(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeSFixed64(fieldNumber, value.get(i)); } } } @Override public void writeSInt32List(int fieldNumber, List<Integer> value, boolean packed) throws IOException { if (value instanceof IntArrayList) { writeSInt32ListInternal(fieldNumber, (IntArrayList) value, packed); } else { writeSInt32ListInternal(fieldNumber, value, packed); } } private void writeSInt32ListInternal(int fieldNumber, IntArrayList value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeSInt32SizeNoTag(value.getInt(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. for (int i = 0; i < value.size(); ++i) { output.writeSInt32NoTag(value.getInt(i)); } } else { for (int i = 0; i < value.size(); ++i) { output.writeSInt32(fieldNumber, value.getInt(i)); } } } public void writeSInt32ListInternal(int fieldNumber, List<Integer> value, boolean packed) throws IOException { if (packed) { output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED); // Compute and write the length of the data. int dataSize = 0; for (int i = 0; i < value.size(); ++i) { dataSize += CodedOutputStream.computeSInt32SizeNoTag(value.get(i)); } output.writeUInt32NoTag(dataSize); // Write the data itself, without any tags. 
// Tail of writeSInt32ListInternal(List<Integer>).
for (int i = 0; i < value.size(); ++i) {
  output.writeSInt32NoTag(value.get(i));
}
} else {
  for (int i = 0; i < value.size(); ++i) {
    output.writeSInt32(fieldNumber, value.get(i));
  }
}
}

/** Writes a repeated sint64 (zig-zag varint) field. */
@Override
public void writeSInt64List(int fieldNumber, List<Long> value, boolean packed)
    throws IOException {
  if (value instanceof LongArrayList) {
    writeSInt64ListInternal(fieldNumber, (LongArrayList) value, packed);
  } else {
    writeSInt64ListInternal(fieldNumber, value, packed);
  }
}

// Primitive fast path: getLong(i) avoids autoboxing.
private void writeSInt64ListInternal(int fieldNumber, LongArrayList value, boolean packed)
    throws IOException {
  if (packed) {
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    // Compute and write the length of the data.
    int dataSize = 0;
    for (int i = 0; i < value.size(); ++i) {
      dataSize += CodedOutputStream.computeSInt64SizeNoTag(value.getLong(i));
    }
    output.writeUInt32NoTag(dataSize);
    // Write the data itself, without any tags.
    for (int i = 0; i < value.size(); ++i) {
      output.writeSInt64NoTag(value.getLong(i));
    }
  } else {
    for (int i = 0; i < value.size(); ++i) {
      output.writeSInt64(fieldNumber, value.getLong(i));
    }
  }
}

// Generic (boxed) path.
private void writeSInt64ListInternal(int fieldNumber, List<Long> value, boolean packed)
    throws IOException {
  if (packed) {
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    // Compute and write the length of the data.
    int dataSize = 0;
    for (int i = 0; i < value.size(); ++i) {
      dataSize += CodedOutputStream.computeSInt64SizeNoTag(value.get(i));
    }
    output.writeUInt32NoTag(dataSize);
    // Write the data itself, without any tags.
    for (int i = 0; i < value.size(); ++i) {
      output.writeSInt64NoTag(value.get(i));
    }
  } else {
    for (int i = 0; i < value.size(); ++i) {
      output.writeSInt64(fieldNumber, value.get(i));
    }
  }
}

/** Writes a repeated message field; messages are never packed. */
@Override
public void writeMessageList(int fieldNumber, List<?> value) throws IOException {
  for (int i = 0; i < value.size(); ++i) {
    writeMessage(fieldNumber, value.get(i));
  }
}

/** Writes a repeated message field using an explicit schema. */
@Override
public void writeMessageList(int fieldNumber, List<?> value, Schema schema)
    throws IOException {
  for (int i = 0; i < value.size(); ++i) {
    writeMessage(fieldNumber, value.get(i), schema);
  }
}

/** Writes a repeated group field (legacy wire format). */
@Deprecated
@Override
public void writeGroupList(int fieldNumber, List<?> value) throws IOException {
  for (int i = 0; i < value.size(); ++i) {
    writeGroup(fieldNumber, value.get(i));
  }
}

/** Writes a repeated group field using an explicit schema. */
@Override
public void writeGroupList(int fieldNumber, List<?> value, Schema schema)
    throws IOException {
  for (int i = 0; i < value.size(); ++i) {
    writeGroup(fieldNumber, value.get(i), schema);
  }
}

/**
 * Writes a map field as a sequence of length-delimited key/value entries.
 * When the output requests deterministic serialization, entries are emitted
 * in sorted key order; otherwise in the map's iteration order.
 */
@Override
public <K, V> void writeMap(int fieldNumber, MapEntryLite.Metadata<K, V> metadata, Map<K, V> map)
    throws IOException {
  if (output.isSerializationDeterministic()) {
    writeDeterministicMap(fieldNumber, metadata, map);
    return;
  }
  for (Map.Entry<K, V> entry : map.entrySet()) {
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    output.writeUInt32NoTag(
        MapEntryLite.computeSerializedSize(metadata, entry.getKey(), entry.getValue()));
    MapEntryLite.writeTo(output, metadata, entry.getKey(), entry.getValue());
  }
}

// Deterministic emission: dispatch on the key's wire type so keys can be
// sorted with the right comparison. Body continues onto the next chunk.
@SuppressWarnings("unchecked")
private <K, V> void writeDeterministicMap(
    int fieldNumber, MapEntryLite.Metadata<K, V> metadata, Map<K, V> map) throws IOException {
  switch (metadata.keyType) {
    case BOOL:
      // Only two possible boolean keys; emit false then true.
      V value;
      if ((value = map.get(Boolean.FALSE)) != null) {
        writeDeterministicBooleanMapEntry(
            fieldNumber, /* key= */ false, value, (MapEntryLite.Metadata<Boolean, V>) metadata);
      }
      if ((value = map.get(Boolean.TRUE)) != null) {
        writeDeterministicBooleanMapEntry(
            fieldNumber, /* key= */ true, value,
            (MapEntryLite.Metadata<Boolean, V>) metadata);
      }
      break;
    case FIXED32:
    case INT32:
    case SFIXED32:
    case SINT32:
    case UINT32:
      // All 32-bit key types sort as signed ints.
      writeDeterministicIntegerMap(
          fieldNumber, (MapEntryLite.Metadata<Integer, V>) metadata, (Map<Integer, V>) map);
      break;
    case FIXED64:
    case INT64:
    case SFIXED64:
    case SINT64:
    case UINT64:
      writeDeterministicLongMap(
          fieldNumber, (MapEntryLite.Metadata<Long, V>) metadata, (Map<Long, V>) map);
      break;
    case STRING:
      writeDeterministicStringMap(
          fieldNumber, (MapEntryLite.Metadata<String, V>) metadata, (Map<String, V>) map);
      break;
    default:
      // Floating-point and message types are not valid proto map keys.
      throw new IllegalArgumentException("does not support key type: " + metadata.keyType);
  }
}

// Emits one boolean-keyed map entry (false/true handled by the caller).
private <V> void writeDeterministicBooleanMapEntry(
    int fieldNumber, boolean key, V value, MapEntryLite.Metadata<Boolean, V> metadata)
    throws IOException {
  output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
  output.writeUInt32NoTag(MapEntryLite.computeSerializedSize(metadata, key, value));
  MapEntryLite.writeTo(output, metadata, key, value);
}

// Copies the int keys out, sorts them, then emits entries in key order.
private <V> void writeDeterministicIntegerMap(
    int fieldNumber, MapEntryLite.Metadata<Integer, V> metadata, Map<Integer, V> map)
    throws IOException {
  int[] keys = new int[map.size()];
  int index = 0;
  for (int k : map.keySet()) {
    keys[index++] = k;
  }
  Arrays.sort(keys);
  for (int key : keys) {
    V value = map.get(key);
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    output.writeUInt32NoTag(MapEntryLite.computeSerializedSize(metadata, key, value));
    MapEntryLite.writeTo(output, metadata, key, value);
  }
}

// Same as above for long keys.
private <V> void writeDeterministicLongMap(
    int fieldNumber, MapEntryLite.Metadata<Long, V> metadata, Map<Long, V> map)
    throws IOException {
  long[] keys = new long[map.size()];
  int index = 0;
  for (long k : map.keySet()) {
    keys[index++] = k;
  }
  Arrays.sort(keys);
  for (long key : keys) {
    V value = map.get(key);
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    output.writeUInt32NoTag(MapEntryLite.computeSerializedSize(metadata, key, value));
    MapEntryLite.writeTo(output, metadata, key, value);
  }
}

// Same as above for string keys (lexicographic sort).
private <V> void writeDeterministicStringMap(
    int fieldNumber, MapEntryLite.Metadata<String, V> metadata, Map<String, V> map)
    throws IOException {
  String[] keys = new String[map.size()];
  int index = 0;
  for (String k : map.keySet()) {
    keys[index++] = k;
  }
  Arrays.sort(keys);
  for (String key : keys) {
    V value = map.get(key);
    output.writeTag(fieldNumber, WIRETYPE_LENGTH_DELIMITED);
    output.writeUInt32NoTag(MapEntryLite.computeSerializedSize(metadata, key, value));
    MapEntryLite.writeTo(output, metadata, key, value);
  }
}
}
googleapis/google-cloud-java
36,008
java-document-ai/proto-google-cloud-document-ai-v1beta3/src/main/java/com/google/cloud/documentai/v1beta3/CreateProcessorRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/documentai/v1beta3/document_processor_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.documentai.v1beta3; /** * * * <pre> * Request message for the * [CreateProcessor][google.cloud.documentai.v1beta3.DocumentProcessorService.CreateProcessor] * method. Notice this request is sent to a regionalized backend service. If the * [ProcessorType][google.cloud.documentai.v1beta3.ProcessorType] isn't * available in that region, the creation fails. * </pre> * * Protobuf type {@code google.cloud.documentai.v1beta3.CreateProcessorRequest} */ public final class CreateProcessorRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.documentai.v1beta3.CreateProcessorRequest) CreateProcessorRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateProcessorRequest.newBuilder() to construct. 
// Builder-based constructor, invoked by Builder.buildPartial().
private CreateProcessorRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) {
  super(builder);
}

// Default-instance constructor; string fields start as "".
private CreateProcessorRequest() {
  parent_ = "";
}

@java.lang.Override
@SuppressWarnings({"unused"})
protected java.lang.Object newInstance(UnusedPrivateParameter unused) {
  return new CreateProcessorRequest();
}

public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
  return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
      .internal_static_google_cloud_documentai_v1beta3_CreateProcessorRequest_descriptor;
}

@java.lang.Override
protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
    internalGetFieldAccessorTable() {
  return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
      .internal_static_google_cloud_documentai_v1beta3_CreateProcessorRequest_fieldAccessorTable
      .ensureFieldAccessorsInitialized(
          com.google.cloud.documentai.v1beta3.CreateProcessorRequest.class,
          com.google.cloud.documentai.v1beta3.CreateProcessorRequest.Builder.class);
}

// Presence bits; bit 0x00000001 tracks whether `processor` is set.
private int bitField0_;
public static final int PARENT_FIELD_NUMBER = 1;

@SuppressWarnings("serial")
private volatile java.lang.Object parent_ = "";

/**
 *
 *
 * <pre>
 * Required. The parent (project and location) under which to create the
 * processor. Format: `projects/{project}/locations/{location}`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The parent.
 */
@java.lang.Override
public java.lang.String getParent() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    return (java.lang.String) ref;
  } else {
    // First access after parsing: decode the UTF-8 bytes once and cache the
    // String back into the field.
    com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref;
    java.lang.String s = bs.toStringUtf8();
    parent_ = s;
    return s;
  }
}

/**
 *
 *
 * <pre>
 * Required. The parent (project and location) under which to create the
 * processor. Format: `projects/{project}/locations/{location}`
 * </pre>
 *
 * <code>
 * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... }
 * </code>
 *
 * @return The bytes for parent.
 */
@java.lang.Override
public com.google.protobuf.ByteString getParentBytes() {
  java.lang.Object ref = parent_;
  if (ref instanceof java.lang.String) {
    // Cache the UTF-8 encoding back into the field.
    com.google.protobuf.ByteString b =
        com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref);
    parent_ = b;
    return b;
  } else {
    return (com.google.protobuf.ByteString) ref;
  }
}

public static final int PROCESSOR_FIELD_NUMBER = 2;
private com.google.cloud.documentai.v1beta3.Processor processor_;

/**
 *
 *
 * <pre>
 * Required. The processor to be created, requires
 * [Processor.type][google.cloud.documentai.v1beta3.Processor.type] and
 * [Processor.display_name][google.cloud.documentai.v1beta3.Processor.display_name]
 * to be set. Also, the
 * [Processor.kms_key_name][google.cloud.documentai.v1beta3.Processor.kms_key_name]
 * field must be set if the processor is under CMEK.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return Whether the processor field is set.
 */
@java.lang.Override
public boolean hasProcessor() {
  return ((bitField0_ & 0x00000001) != 0);
}

/**
 *
 *
 * <pre>
 * Required. The processor to be created, requires
 * [Processor.type][google.cloud.documentai.v1beta3.Processor.type] and
 * [Processor.display_name][google.cloud.documentai.v1beta3.Processor.display_name]
 * to be set. Also, the
 * [Processor.kms_key_name][google.cloud.documentai.v1beta3.Processor.kms_key_name]
 * field must be set if the processor is under CMEK.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 *
 * @return The processor.
 */
@java.lang.Override
public com.google.cloud.documentai.v1beta3.Processor getProcessor() {
  return processor_ == null
      ? com.google.cloud.documentai.v1beta3.Processor.getDefaultInstance()
      : processor_;
}

/**
 *
 *
 * <pre>
 * Required. The processor to be created, requires
 * [Processor.type][google.cloud.documentai.v1beta3.Processor.type] and
 * [Processor.display_name][google.cloud.documentai.v1beta3.Processor.display_name]
 * to be set. Also, the
 * [Processor.kms_key_name][google.cloud.documentai.v1beta3.Processor.kms_key_name]
 * field must be set if the processor is under CMEK.
 * </pre>
 *
 * <code>
 * .google.cloud.documentai.v1beta3.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED];
 * </code>
 */
@java.lang.Override
public com.google.cloud.documentai.v1beta3.ProcessorOrBuilder getProcessorOrBuilder() {
  return processor_ == null
      ? com.google.cloud.documentai.v1beta3.Processor.getDefaultInstance()
      : processor_;
}

// Memoized tri-state: -1 unknown, 0 not initialized, 1 initialized.
private byte memoizedIsInitialized = -1;

@java.lang.Override
public final boolean isInitialized() {
  byte isInitialized = memoizedIsInitialized;
  if (isInitialized == 1) return true;
  if (isInitialized == 0) return false;
  memoizedIsInitialized = 1;
  return true;
}

@java.lang.Override
public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException {
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    output.writeMessage(2, getProcessor());
  }
  getUnknownFields().writeTo(output);
}

@java.lang.Override
public int getSerializedSize() {
  int size = memoizedSize;
  if (size != -1) return size;

  size = 0;
  if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) {
    size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_);
  }
  if (((bitField0_ & 0x00000001) != 0)) {
    size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getProcessor());
  }
  size += getUnknownFields().getSerializedSize();
  memoizedSize = size;
  return size;
}

@java.lang.Override
public boolean equals(final java.lang.Object
    obj) {
  if (obj == this) {
    return true;
  }
  if (!(obj instanceof com.google.cloud.documentai.v1beta3.CreateProcessorRequest)) {
    return super.equals(obj);
  }
  com.google.cloud.documentai.v1beta3.CreateProcessorRequest other =
      (com.google.cloud.documentai.v1beta3.CreateProcessorRequest) obj;

  if (!getParent().equals(other.getParent())) return false;
  if (hasProcessor() != other.hasProcessor()) return false;
  if (hasProcessor()) {
    if (!getProcessor().equals(other.getProcessor())) return false;
  }
  if (!getUnknownFields().equals(other.getUnknownFields())) return false;
  return true;
}

@java.lang.Override
public int hashCode() {
  if (memoizedHashCode != 0) {
    return memoizedHashCode;
  }
  int hash = 41;
  hash = (19 * hash) + getDescriptor().hashCode();
  hash = (37 * hash) + PARENT_FIELD_NUMBER;
  hash = (53 * hash) + getParent().hashCode();
  if (hasProcessor()) {
    hash = (37 * hash) + PROCESSOR_FIELD_NUMBER;
    hash = (53 * hash) + getProcessor().hashCode();
  }
  hash = (29 * hash) + getUnknownFields().hashCode();
  memoizedHashCode = hash;
  return hash;
}

// Standard generated parse entry points for every supported input kind.
public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest parseFrom(
    java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest parseFrom(
    java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest parseFrom(
    com.google.protobuf.ByteString data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest parseFrom(
    com.google.protobuf.ByteString data,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest parseFrom(byte[] data)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data);
}

public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest parseFrom(
    byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws com.google.protobuf.InvalidProtocolBufferException {
  return PARSER.parseFrom(data, extensionRegistry);
}

public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest parseFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest parseFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest parseDelimitedFrom(
    java.io.InputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input);
}

public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest parseDelimitedFrom(
    java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(
      PARSER, input, extensionRegistry);
}

public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest parseFrom(
    com.google.protobuf.CodedInputStream input) throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input);
}

public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest parseFrom(
    com.google.protobuf.CodedInputStream input,
    com.google.protobuf.ExtensionRegistryLite extensionRegistry)
    throws java.io.IOException {
  return com.google.protobuf.GeneratedMessageV3.parseWithIOException(
      PARSER, input, extensionRegistry);
}

@java.lang.Override
public Builder newBuilderForType() {
  return newBuilder();
}

public static Builder newBuilder() {
  return DEFAULT_INSTANCE.toBuilder();
}

public static Builder newBuilder(
    com.google.cloud.documentai.v1beta3.CreateProcessorRequest prototype) {
  return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype);
}

@java.lang.Override
public Builder toBuilder() {
  return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this);
}

@java.lang.Override
protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
  Builder builder = new Builder(parent);
  return builder;
}

/**
 *
 *
 * <pre>
 * Request message for the
 * [CreateProcessor][google.cloud.documentai.v1beta3.DocumentProcessorService.CreateProcessor]
 * method. Notice this request is sent to a regionalized backend service. If the
 * [ProcessorType][google.cloud.documentai.v1beta3.ProcessorType] isn't
 * available in that region, the creation fails.
 * </pre>
 *
 * Protobuf type {@code google.cloud.documentai.v1beta3.CreateProcessorRequest}
 */
public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder>
    implements
    // @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1beta3.CreateProcessorRequest)
    com.google.cloud.documentai.v1beta3.CreateProcessorRequestOrBuilder {
  public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() {
    return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
        .internal_static_google_cloud_documentai_v1beta3_CreateProcessorRequest_descriptor;
  }

  @java.lang.Override
  protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internalGetFieldAccessorTable() {
    return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
        .internal_static_google_cloud_documentai_v1beta3_CreateProcessorRequest_fieldAccessorTable
        .ensureFieldAccessorsInitialized(
            com.google.cloud.documentai.v1beta3.CreateProcessorRequest.class,
            com.google.cloud.documentai.v1beta3.CreateProcessorRequest.Builder.class);
  }

  // Construct using com.google.cloud.documentai.v1beta3.CreateProcessorRequest.newBuilder()
  private Builder() {
    maybeForceBuilderInitialization();
  }

  private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) {
    super(parent);
    maybeForceBuilderInitialization();
  }

  private void maybeForceBuilderInitialization() {
    if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) {
      getProcessorFieldBuilder();
    }
  }

  @java.lang.Override
  public Builder clear() {
    super.clear();
    bitField0_ = 0;
    parent_ = "";
    processor_ = null;
    if (processorBuilder_ != null) {
      processorBuilder_.dispose();
      processorBuilder_ = null;
    }
    return this;
  }

  @java.lang.Override
  public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() {
    return com.google.cloud.documentai.v1beta3.DocumentAiProcessorService
        .internal_static_google_cloud_documentai_v1beta3_CreateProcessorRequest_descriptor;
  }

  @java.lang.Override
  public com.google.cloud.documentai.v1beta3.CreateProcessorRequest getDefaultInstanceForType() {
    return com.google.cloud.documentai.v1beta3.CreateProcessorRequest.getDefaultInstance();
  }

  @java.lang.Override
  public com.google.cloud.documentai.v1beta3.CreateProcessorRequest build() {
    com.google.cloud.documentai.v1beta3.CreateProcessorRequest result = buildPartial();
    if (!result.isInitialized()) {
      throw newUninitializedMessageException(result);
    }
    return result;
  }

  @java.lang.Override
  public com.google.cloud.documentai.v1beta3.CreateProcessorRequest buildPartial() {
    com.google.cloud.documentai.v1beta3.CreateProcessorRequest result =
        new com.google.cloud.documentai.v1beta3.CreateProcessorRequest(this);
    if (bitField0_ != 0) {
      buildPartial0(result);
    }
    onBuilt();
    return result;
  }

  // Copies set fields from the builder into the result message. Note the bit
  // mapping: builder bit 0x2 (processor set) becomes message bit 0x1.
  private void buildPartial0(com.google.cloud.documentai.v1beta3.CreateProcessorRequest result) {
    int from_bitField0_ = bitField0_;
    if (((from_bitField0_ & 0x00000001) != 0)) {
      result.parent_ = parent_;
    }
    int to_bitField0_ = 0;
    if (((from_bitField0_ & 0x00000002) != 0)) {
      result.processor_ = processorBuilder_ == null ? processor_ : processorBuilder_.build();
      to_bitField0_ |= 0x00000001;
    }
    result.bitField0_ |= to_bitField0_;
  }

  @java.lang.Override
  public Builder clone() {
    return super.clone();
  }

  @java.lang.Override
  public Builder setField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.setField(field, value);
  }

  @java.lang.Override
  public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) {
    return super.clearField(field);
  }

  @java.lang.Override
  public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) {
    return super.clearOneof(oneof);
  }

  @java.lang.Override
  public Builder setRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) {
    return super.setRepeatedField(field, index, value);
  }

  @java.lang.Override
  public Builder addRepeatedField(
      com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) {
    return super.addRepeatedField(field, value);
  }

  @java.lang.Override
  public Builder mergeFrom(com.google.protobuf.Message other) {
    if (other instanceof com.google.cloud.documentai.v1beta3.CreateProcessorRequest) {
      return mergeFrom((com.google.cloud.documentai.v1beta3.CreateProcessorRequest) other);
    } else {
      super.mergeFrom(other);
      return this;
    }
  }

  public Builder mergeFrom(com.google.cloud.documentai.v1beta3.CreateProcessorRequest other) {
    if (other == com.google.cloud.documentai.v1beta3.CreateProcessorRequest.getDefaultInstance())
      return this;
    if (!other.getParent().isEmpty()) {
      parent_ = other.parent_;
      bitField0_ |= 0x00000001;
      onChanged();
    }
    if (other.hasProcessor()) {
      mergeProcessor(other.getProcessor());
    }
    this.mergeUnknownFields(other.getUnknownFields());
    onChanged();
    return this;
  }

  @java.lang.Override
  public final boolean isInitialized() {
    return true;
  }

  @java.lang.Override
  public Builder mergeFrom(
      com.google.protobuf.CodedInputStream input,
      com.google.protobuf.ExtensionRegistryLite extensionRegistry)
      throws java.io.IOException
{ if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getProcessorFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent (project and location) under which to create the * processor. Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The parent (project and location) under which to create the * processor. Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent (project and location) under which to create the * processor. Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The parent (project and location) under which to create the * processor. Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The parent (project and location) under which to create the * processor. Format: `projects/{project}/locations/{location}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.documentai.v1beta3.Processor processor_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1beta3.Processor, com.google.cloud.documentai.v1beta3.Processor.Builder, com.google.cloud.documentai.v1beta3.ProcessorOrBuilder> processorBuilder_; /** * * * <pre> * Required. The processor to be created, requires * [Processor.type][google.cloud.documentai.v1beta3.Processor.type] and * [Processor.display_name][google.cloud.documentai.v1beta3.Processor.display_name] * to be set. Also, the * [Processor.kms_key_name][google.cloud.documentai.v1beta3.Processor.kms_key_name] * field must be set if the processor is under CMEK. * </pre> * * <code> * .google.cloud.documentai.v1beta3.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the processor field is set. */ public boolean hasProcessor() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The processor to be created, requires * [Processor.type][google.cloud.documentai.v1beta3.Processor.type] and * [Processor.display_name][google.cloud.documentai.v1beta3.Processor.display_name] * to be set. Also, the * [Processor.kms_key_name][google.cloud.documentai.v1beta3.Processor.kms_key_name] * field must be set if the processor is under CMEK. * </pre> * * <code> * .google.cloud.documentai.v1beta3.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The processor. */ public com.google.cloud.documentai.v1beta3.Processor getProcessor() { if (processorBuilder_ == null) { return processor_ == null ? com.google.cloud.documentai.v1beta3.Processor.getDefaultInstance() : processor_; } else { return processorBuilder_.getMessage(); } } /** * * * <pre> * Required. 
The processor to be created, requires * [Processor.type][google.cloud.documentai.v1beta3.Processor.type] and * [Processor.display_name][google.cloud.documentai.v1beta3.Processor.display_name] * to be set. Also, the * [Processor.kms_key_name][google.cloud.documentai.v1beta3.Processor.kms_key_name] * field must be set if the processor is under CMEK. * </pre> * * <code> * .google.cloud.documentai.v1beta3.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setProcessor(com.google.cloud.documentai.v1beta3.Processor value) { if (processorBuilder_ == null) { if (value == null) { throw new NullPointerException(); } processor_ = value; } else { processorBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The processor to be created, requires * [Processor.type][google.cloud.documentai.v1beta3.Processor.type] and * [Processor.display_name][google.cloud.documentai.v1beta3.Processor.display_name] * to be set. Also, the * [Processor.kms_key_name][google.cloud.documentai.v1beta3.Processor.kms_key_name] * field must be set if the processor is under CMEK. * </pre> * * <code> * .google.cloud.documentai.v1beta3.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setProcessor( com.google.cloud.documentai.v1beta3.Processor.Builder builderForValue) { if (processorBuilder_ == null) { processor_ = builderForValue.build(); } else { processorBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The processor to be created, requires * [Processor.type][google.cloud.documentai.v1beta3.Processor.type] and * [Processor.display_name][google.cloud.documentai.v1beta3.Processor.display_name] * to be set. Also, the * [Processor.kms_key_name][google.cloud.documentai.v1beta3.Processor.kms_key_name] * field must be set if the processor is under CMEK. 
* </pre> * * <code> * .google.cloud.documentai.v1beta3.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeProcessor(com.google.cloud.documentai.v1beta3.Processor value) { if (processorBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && processor_ != null && processor_ != com.google.cloud.documentai.v1beta3.Processor.getDefaultInstance()) { getProcessorBuilder().mergeFrom(value); } else { processor_ = value; } } else { processorBuilder_.mergeFrom(value); } if (processor_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The processor to be created, requires * [Processor.type][google.cloud.documentai.v1beta3.Processor.type] and * [Processor.display_name][google.cloud.documentai.v1beta3.Processor.display_name] * to be set. Also, the * [Processor.kms_key_name][google.cloud.documentai.v1beta3.Processor.kms_key_name] * field must be set if the processor is under CMEK. * </pre> * * <code> * .google.cloud.documentai.v1beta3.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearProcessor() { bitField0_ = (bitField0_ & ~0x00000002); processor_ = null; if (processorBuilder_ != null) { processorBuilder_.dispose(); processorBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The processor to be created, requires * [Processor.type][google.cloud.documentai.v1beta3.Processor.type] and * [Processor.display_name][google.cloud.documentai.v1beta3.Processor.display_name] * to be set. Also, the * [Processor.kms_key_name][google.cloud.documentai.v1beta3.Processor.kms_key_name] * field must be set if the processor is under CMEK. 
* </pre> * * <code> * .google.cloud.documentai.v1beta3.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.documentai.v1beta3.Processor.Builder getProcessorBuilder() { bitField0_ |= 0x00000002; onChanged(); return getProcessorFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The processor to be created, requires * [Processor.type][google.cloud.documentai.v1beta3.Processor.type] and * [Processor.display_name][google.cloud.documentai.v1beta3.Processor.display_name] * to be set. Also, the * [Processor.kms_key_name][google.cloud.documentai.v1beta3.Processor.kms_key_name] * field must be set if the processor is under CMEK. * </pre> * * <code> * .google.cloud.documentai.v1beta3.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.documentai.v1beta3.ProcessorOrBuilder getProcessorOrBuilder() { if (processorBuilder_ != null) { return processorBuilder_.getMessageOrBuilder(); } else { return processor_ == null ? com.google.cloud.documentai.v1beta3.Processor.getDefaultInstance() : processor_; } } /** * * * <pre> * Required. The processor to be created, requires * [Processor.type][google.cloud.documentai.v1beta3.Processor.type] and * [Processor.display_name][google.cloud.documentai.v1beta3.Processor.display_name] * to be set. Also, the * [Processor.kms_key_name][google.cloud.documentai.v1beta3.Processor.kms_key_name] * field must be set if the processor is under CMEK. 
* </pre> * * <code> * .google.cloud.documentai.v1beta3.Processor processor = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1beta3.Processor, com.google.cloud.documentai.v1beta3.Processor.Builder, com.google.cloud.documentai.v1beta3.ProcessorOrBuilder> getProcessorFieldBuilder() { if (processorBuilder_ == null) { processorBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1beta3.Processor, com.google.cloud.documentai.v1beta3.Processor.Builder, com.google.cloud.documentai.v1beta3.ProcessorOrBuilder>( getProcessor(), getParentForChildren(), isClean()); processor_ = null; } return processorBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1beta3.CreateProcessorRequest) } // @@protoc_insertion_point(class_scope:google.cloud.documentai.v1beta3.CreateProcessorRequest) private static final com.google.cloud.documentai.v1beta3.CreateProcessorRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.documentai.v1beta3.CreateProcessorRequest(); } public static com.google.cloud.documentai.v1beta3.CreateProcessorRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateProcessorRequest> PARSER = new com.google.protobuf.AbstractParser<CreateProcessorRequest>() { @java.lang.Override public CreateProcessorRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { 
builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateProcessorRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateProcessorRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.documentai.v1beta3.CreateProcessorRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,875
java-securityposture/proto-google-cloud-securityposture-v1/src/main/java/com/google/cloud/securityposture/v1/ListPostureDeploymentsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securityposture/v1/securityposture.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securityposture.v1; /** * * * <pre> * Message for requesting list of PostureDeployments. * </pre> * * Protobuf type {@code google.cloud.securityposture.v1.ListPostureDeploymentsRequest} */ public final class ListPostureDeploymentsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securityposture.v1.ListPostureDeploymentsRequest) ListPostureDeploymentsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListPostureDeploymentsRequest.newBuilder() to construct. 
private ListPostureDeploymentsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListPostureDeploymentsRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListPostureDeploymentsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureDeploymentsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureDeploymentsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest.class, com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent value for ListPostureDeploymentsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. Parent value for ListPostureDeploymentsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Optional. Requested page size. Server may return fewer items than * requested. If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return 
size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest)) { return super.equals(obj); } com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest other = (com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { 
return PARSER.parseFrom(data); } public static com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static 
com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for requesting list of PostureDeployments. 
* </pre> * * Protobuf type {@code google.cloud.securityposture.v1.ListPostureDeploymentsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securityposture.v1.ListPostureDeploymentsRequest) com.google.cloud.securityposture.v1.ListPostureDeploymentsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureDeploymentsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureDeploymentsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest.class, com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest.Builder.class); } // Construct using // com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securityposture.v1.V1mainProto .internal_static_google_cloud_securityposture_v1_ListPostureDeploymentsRequest_descriptor; } @java.lang.Override public com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest getDefaultInstanceForType() { return com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest.getDefaultInstance(); } @java.lang.Override public 
com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest build() { com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest buildPartial() { com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest result = new com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder 
mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest) { return mergeFrom((com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest other) { if (other == com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest.getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch 
(com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. Parent value for ListPostureDeploymentsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Parent value for ListPostureDeploymentsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Parent value for ListPostureDeploymentsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. Parent value for ListPostureDeploymentsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. 
*/ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. Parent value for ListPostureDeploymentsRequest. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. Requested page size. Server may return fewer items than * requested. If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. Requested page size. Server may return fewer items than * requested. If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. */ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Requested page size. Server may return fewer items than * requested. If unspecified, server will pick an appropriate default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. 
A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. */ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. A token identifying a page of results the server should return. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. A token identifying a page of results the server should return. 
* </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. */ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. 
*/ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. Filter to be applied on the resource, defined by EBNF grammar * https://google.aip.dev/assets/misc/ebnf-filtering.txt. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. */ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securityposture.v1.ListPostureDeploymentsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.securityposture.v1.ListPostureDeploymentsRequest) private static final com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest(); } public static com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest 
getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListPostureDeploymentsRequest> PARSER = new com.google.protobuf.AbstractParser<ListPostureDeploymentsRequest>() { @java.lang.Override public ListPostureDeploymentsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListPostureDeploymentsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListPostureDeploymentsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securityposture.v1.ListPostureDeploymentsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,927
java-texttospeech/proto-google-cloud-texttospeech-v1/src/main/java/com/google/cloud/texttospeech/v1/SynthesizeLongAudioMetadata.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/texttospeech/v1/cloud_tts_lrs.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.texttospeech.v1; /** * * * <pre> * Metadata for response returned by the `SynthesizeLongAudio` method. * </pre> * * Protobuf type {@code google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata} */ public final class SynthesizeLongAudioMetadata extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata) SynthesizeLongAudioMetadataOrBuilder { private static final long serialVersionUID = 0L; // Use SynthesizeLongAudioMetadata.newBuilder() to construct. 
private SynthesizeLongAudioMetadata(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SynthesizeLongAudioMetadata() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SynthesizeLongAudioMetadata(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.texttospeech.v1.TextToSpeechLongAudioSynthesisProto .internal_static_google_cloud_texttospeech_v1_SynthesizeLongAudioMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.texttospeech.v1.TextToSpeechLongAudioSynthesisProto .internal_static_google_cloud_texttospeech_v1_SynthesizeLongAudioMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata.class, com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata.Builder.class); } private int bitField0_; public static final int START_TIME_FIELD_NUMBER = 1; private com.google.protobuf.Timestamp startTime_; /** * * * <pre> * Time when the request was received. * </pre> * * <code>.google.protobuf.Timestamp start_time = 1;</code> * * @return Whether the startTime field is set. */ @java.lang.Override public boolean hasStartTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Time when the request was received. * </pre> * * <code>.google.protobuf.Timestamp start_time = 1;</code> * * @return The startTime. */ @java.lang.Override public com.google.protobuf.Timestamp getStartTime() { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } /** * * * <pre> * Time when the request was received. 
* </pre> * * <code>.google.protobuf.Timestamp start_time = 1;</code> */ @java.lang.Override public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } public static final int LAST_UPDATE_TIME_FIELD_NUMBER = 2; private com.google.protobuf.Timestamp lastUpdateTime_; /** * * * <pre> * Deprecated. Do not use. * </pre> * * <code>.google.protobuf.Timestamp last_update_time = 2 [deprecated = true];</code> * * @deprecated google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata.last_update_time is * deprecated. See google/cloud/texttospeech/v1/cloud_tts_lrs.proto;l=84 * @return Whether the lastUpdateTime field is set. */ @java.lang.Override @java.lang.Deprecated public boolean hasLastUpdateTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Deprecated. Do not use. * </pre> * * <code>.google.protobuf.Timestamp last_update_time = 2 [deprecated = true];</code> * * @deprecated google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata.last_update_time is * deprecated. See google/cloud/texttospeech/v1/cloud_tts_lrs.proto;l=84 * @return The lastUpdateTime. */ @java.lang.Override @java.lang.Deprecated public com.google.protobuf.Timestamp getLastUpdateTime() { return lastUpdateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : lastUpdateTime_; } /** * * * <pre> * Deprecated. Do not use. * </pre> * * <code>.google.protobuf.Timestamp last_update_time = 2 [deprecated = true];</code> */ @java.lang.Override @java.lang.Deprecated public com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder() { return lastUpdateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : lastUpdateTime_; } public static final int PROGRESS_PERCENTAGE_FIELD_NUMBER = 3; private double progressPercentage_ = 0D; /** * * * <pre> * The progress of the most recent processing update in percentage, ie. 70.0%. 
* </pre> * * <code>double progress_percentage = 3;</code> * * @return The progressPercentage. */ @java.lang.Override public double getProgressPercentage() { return progressPercentage_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getStartTime()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getLastUpdateTime()); } if (java.lang.Double.doubleToRawLongBits(progressPercentage_) != 0) { output.writeDouble(3, progressPercentage_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getStartTime()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getLastUpdateTime()); } if (java.lang.Double.doubleToRawLongBits(progressPercentage_) != 0) { size += com.google.protobuf.CodedOutputStream.computeDoubleSize(3, progressPercentage_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata)) { return super.equals(obj); } com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata other = (com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata) obj; if (hasStartTime() != other.hasStartTime()) return false; if (hasStartTime()) { if (!getStartTime().equals(other.getStartTime())) 
return false; } if (hasLastUpdateTime() != other.hasLastUpdateTime()) return false; if (hasLastUpdateTime()) { if (!getLastUpdateTime().equals(other.getLastUpdateTime())) return false; } if (java.lang.Double.doubleToLongBits(getProgressPercentage()) != java.lang.Double.doubleToLongBits(other.getProgressPercentage())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasStartTime()) { hash = (37 * hash) + START_TIME_FIELD_NUMBER; hash = (53 * hash) + getStartTime().hashCode(); } if (hasLastUpdateTime()) { hash = (37 * hash) + LAST_UPDATE_TIME_FIELD_NUMBER; hash = (53 * hash) + getLastUpdateTime().hashCode(); } hash = (37 * hash) + PROGRESS_PERCENTAGE_FIELD_NUMBER; hash = (53 * hash) + com.google.protobuf.Internal.hashLong( java.lang.Double.doubleToLongBits(getProgressPercentage())); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) 
throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata 
parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Metadata for response returned by the `SynthesizeLongAudio` method. * </pre> * * Protobuf type {@code google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata) com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadataOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.texttospeech.v1.TextToSpeechLongAudioSynthesisProto .internal_static_google_cloud_texttospeech_v1_SynthesizeLongAudioMetadata_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.texttospeech.v1.TextToSpeechLongAudioSynthesisProto .internal_static_google_cloud_texttospeech_v1_SynthesizeLongAudioMetadata_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata.class, 
com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata.Builder.class); } // Construct using com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getStartTimeFieldBuilder(); getLastUpdateTimeFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; startTime_ = null; if (startTimeBuilder_ != null) { startTimeBuilder_.dispose(); startTimeBuilder_ = null; } lastUpdateTime_ = null; if (lastUpdateTimeBuilder_ != null) { lastUpdateTimeBuilder_.dispose(); lastUpdateTimeBuilder_ = null; } progressPercentage_ = 0D; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.texttospeech.v1.TextToSpeechLongAudioSynthesisProto .internal_static_google_cloud_texttospeech_v1_SynthesizeLongAudioMetadata_descriptor; } @java.lang.Override public com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata getDefaultInstanceForType() { return com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata.getDefaultInstance(); } @java.lang.Override public com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata build() { com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata buildPartial() { com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata result = new com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( 
com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.startTime_ = startTimeBuilder_ == null ? startTime_ : startTimeBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.lastUpdateTime_ = lastUpdateTimeBuilder_ == null ? lastUpdateTime_ : lastUpdateTimeBuilder_.build(); to_bitField0_ |= 0x00000002; } if (((from_bitField0_ & 0x00000004) != 0)) { result.progressPercentage_ = progressPercentage_; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata) { return mergeFrom((com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata other) { if (other == com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata.getDefaultInstance()) 
return this; if (other.hasStartTime()) { mergeStartTime(other.getStartTime()); } if (other.hasLastUpdateTime()) { mergeLastUpdateTime(other.getLastUpdateTime()); } if (other.getProgressPercentage() != 0D) { setProgressPercentage(other.getProgressPercentage()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getStartTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getLastUpdateTimeFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 25: { progressPercentage_ = input.readDouble(); bitField0_ |= 0x00000004; break; } // case 25 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.protobuf.Timestamp startTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> startTimeBuilder_; /** * * * <pre> * Time when the request was received. * </pre> * * <code>.google.protobuf.Timestamp start_time = 1;</code> * * @return Whether the startTime field is set. 
*/ public boolean hasStartTime() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Time when the request was received. * </pre> * * <code>.google.protobuf.Timestamp start_time = 1;</code> * * @return The startTime. */ public com.google.protobuf.Timestamp getStartTime() { if (startTimeBuilder_ == null) { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } else { return startTimeBuilder_.getMessage(); } } /** * * * <pre> * Time when the request was received. * </pre> * * <code>.google.protobuf.Timestamp start_time = 1;</code> */ public Builder setStartTime(com.google.protobuf.Timestamp value) { if (startTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } startTime_ = value; } else { startTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Time when the request was received. * </pre> * * <code>.google.protobuf.Timestamp start_time = 1;</code> */ public Builder setStartTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (startTimeBuilder_ == null) { startTime_ = builderForValue.build(); } else { startTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Time when the request was received. * </pre> * * <code>.google.protobuf.Timestamp start_time = 1;</code> */ public Builder mergeStartTime(com.google.protobuf.Timestamp value) { if (startTimeBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && startTime_ != null && startTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getStartTimeBuilder().mergeFrom(value); } else { startTime_ = value; } } else { startTimeBuilder_.mergeFrom(value); } if (startTime_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Time when the request was received. 
* </pre> * * <code>.google.protobuf.Timestamp start_time = 1;</code> */ public Builder clearStartTime() { bitField0_ = (bitField0_ & ~0x00000001); startTime_ = null; if (startTimeBuilder_ != null) { startTimeBuilder_.dispose(); startTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Time when the request was received. * </pre> * * <code>.google.protobuf.Timestamp start_time = 1;</code> */ public com.google.protobuf.Timestamp.Builder getStartTimeBuilder() { bitField0_ |= 0x00000001; onChanged(); return getStartTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Time when the request was received. * </pre> * * <code>.google.protobuf.Timestamp start_time = 1;</code> */ public com.google.protobuf.TimestampOrBuilder getStartTimeOrBuilder() { if (startTimeBuilder_ != null) { return startTimeBuilder_.getMessageOrBuilder(); } else { return startTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : startTime_; } } /** * * * <pre> * Time when the request was received. * </pre> * * <code>.google.protobuf.Timestamp start_time = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getStartTimeFieldBuilder() { if (startTimeBuilder_ == null) { startTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getStartTime(), getParentForChildren(), isClean()); startTime_ = null; } return startTimeBuilder_; } private com.google.protobuf.Timestamp lastUpdateTime_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> lastUpdateTimeBuilder_; /** * * * <pre> * Deprecated. Do not use. 
* </pre> * * <code>.google.protobuf.Timestamp last_update_time = 2 [deprecated = true];</code> * * @deprecated google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata.last_update_time is * deprecated. See google/cloud/texttospeech/v1/cloud_tts_lrs.proto;l=84 * @return Whether the lastUpdateTime field is set. */ @java.lang.Deprecated public boolean hasLastUpdateTime() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Deprecated. Do not use. * </pre> * * <code>.google.protobuf.Timestamp last_update_time = 2 [deprecated = true];</code> * * @deprecated google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata.last_update_time is * deprecated. See google/cloud/texttospeech/v1/cloud_tts_lrs.proto;l=84 * @return The lastUpdateTime. */ @java.lang.Deprecated public com.google.protobuf.Timestamp getLastUpdateTime() { if (lastUpdateTimeBuilder_ == null) { return lastUpdateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : lastUpdateTime_; } else { return lastUpdateTimeBuilder_.getMessage(); } } /** * * * <pre> * Deprecated. Do not use. * </pre> * * <code>.google.protobuf.Timestamp last_update_time = 2 [deprecated = true];</code> */ @java.lang.Deprecated public Builder setLastUpdateTime(com.google.protobuf.Timestamp value) { if (lastUpdateTimeBuilder_ == null) { if (value == null) { throw new NullPointerException(); } lastUpdateTime_ = value; } else { lastUpdateTimeBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Deprecated. Do not use. * </pre> * * <code>.google.protobuf.Timestamp last_update_time = 2 [deprecated = true];</code> */ @java.lang.Deprecated public Builder setLastUpdateTime(com.google.protobuf.Timestamp.Builder builderForValue) { if (lastUpdateTimeBuilder_ == null) { lastUpdateTime_ = builderForValue.build(); } else { lastUpdateTimeBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Deprecated. Do not use. 
* </pre> * * <code>.google.protobuf.Timestamp last_update_time = 2 [deprecated = true];</code> */ @java.lang.Deprecated public Builder mergeLastUpdateTime(com.google.protobuf.Timestamp value) { if (lastUpdateTimeBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && lastUpdateTime_ != null && lastUpdateTime_ != com.google.protobuf.Timestamp.getDefaultInstance()) { getLastUpdateTimeBuilder().mergeFrom(value); } else { lastUpdateTime_ = value; } } else { lastUpdateTimeBuilder_.mergeFrom(value); } if (lastUpdateTime_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Deprecated. Do not use. * </pre> * * <code>.google.protobuf.Timestamp last_update_time = 2 [deprecated = true];</code> */ @java.lang.Deprecated public Builder clearLastUpdateTime() { bitField0_ = (bitField0_ & ~0x00000002); lastUpdateTime_ = null; if (lastUpdateTimeBuilder_ != null) { lastUpdateTimeBuilder_.dispose(); lastUpdateTimeBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Deprecated. Do not use. * </pre> * * <code>.google.protobuf.Timestamp last_update_time = 2 [deprecated = true];</code> */ @java.lang.Deprecated public com.google.protobuf.Timestamp.Builder getLastUpdateTimeBuilder() { bitField0_ |= 0x00000002; onChanged(); return getLastUpdateTimeFieldBuilder().getBuilder(); } /** * * * <pre> * Deprecated. Do not use. * </pre> * * <code>.google.protobuf.Timestamp last_update_time = 2 [deprecated = true];</code> */ @java.lang.Deprecated public com.google.protobuf.TimestampOrBuilder getLastUpdateTimeOrBuilder() { if (lastUpdateTimeBuilder_ != null) { return lastUpdateTimeBuilder_.getMessageOrBuilder(); } else { return lastUpdateTime_ == null ? com.google.protobuf.Timestamp.getDefaultInstance() : lastUpdateTime_; } } /** * * * <pre> * Deprecated. Do not use. 
* </pre> * * <code>.google.protobuf.Timestamp last_update_time = 2 [deprecated = true];</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder> getLastUpdateTimeFieldBuilder() { if (lastUpdateTimeBuilder_ == null) { lastUpdateTimeBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.Timestamp, com.google.protobuf.Timestamp.Builder, com.google.protobuf.TimestampOrBuilder>( getLastUpdateTime(), getParentForChildren(), isClean()); lastUpdateTime_ = null; } return lastUpdateTimeBuilder_; } private double progressPercentage_; /** * * * <pre> * The progress of the most recent processing update in percentage, ie. 70.0%. * </pre> * * <code>double progress_percentage = 3;</code> * * @return The progressPercentage. */ @java.lang.Override public double getProgressPercentage() { return progressPercentage_; } /** * * * <pre> * The progress of the most recent processing update in percentage, ie. 70.0%. * </pre> * * <code>double progress_percentage = 3;</code> * * @param value The progressPercentage to set. * @return This builder for chaining. */ public Builder setProgressPercentage(double value) { progressPercentage_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The progress of the most recent processing update in percentage, ie. 70.0%. * </pre> * * <code>double progress_percentage = 3;</code> * * @return This builder for chaining. 
*/ public Builder clearProgressPercentage() { bitField0_ = (bitField0_ & ~0x00000004); progressPercentage_ = 0D; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata) } // @@protoc_insertion_point(class_scope:google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata) private static final com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata(); } public static com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SynthesizeLongAudioMetadata> PARSER = new com.google.protobuf.AbstractParser<SynthesizeLongAudioMetadata>() { @java.lang.Override public SynthesizeLongAudioMetadata parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static 
com.google.protobuf.Parser<SynthesizeLongAudioMetadata> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SynthesizeLongAudioMetadata> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.texttospeech.v1.SynthesizeLongAudioMetadata getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/flink
36,072
flink-tests/src/test/java/org/apache/flink/test/checkpointing/UnalignedCheckpointRescaleITCase.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.flink.test.checkpointing; import org.apache.flink.api.common.JobExecutionResult; import org.apache.flink.api.common.accumulators.LongCounter; import org.apache.flink.api.common.functions.FilterFunction; import org.apache.flink.api.common.functions.MapFunction; import org.apache.flink.api.common.functions.OpenContext; import org.apache.flink.api.common.functions.Partitioner; import org.apache.flink.api.common.functions.RichMapFunction; import org.apache.flink.api.common.state.ListState; import org.apache.flink.api.common.state.ListStateDescriptor; import org.apache.flink.api.common.state.MapStateDescriptor; import org.apache.flink.api.common.state.ValueState; import org.apache.flink.api.common.state.ValueStateDescriptor; import org.apache.flink.api.common.typeinfo.BasicTypeInfo; import org.apache.flink.api.common.typeinfo.Types; import org.apache.flink.api.common.typeutils.base.LongSerializer; import org.apache.flink.api.java.functions.KeySelector; import org.apache.flink.runtime.state.FunctionInitializationContext; import org.apache.flink.runtime.state.FunctionSnapshotContext; import org.apache.flink.streaming.api.checkpoint.CheckpointedFunction; import 
org.apache.flink.streaming.api.datastream.BroadcastStream; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction; import org.apache.flink.streaming.api.functions.co.CoMapFunction; import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction; import org.apache.flink.streaming.api.functions.co.KeyedCoProcessFunction; import org.apache.flink.streaming.api.functions.sink.legacy.SinkFunction; import org.apache.flink.util.Collector; import org.apache.commons.lang3.ArrayUtils; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.Parameterized; import java.io.File; import java.util.Arrays; import java.util.BitSet; import java.util.Collections; import static org.apache.flink.api.common.eventtime.WatermarkStrategy.noWatermarks; import static org.apache.flink.util.Preconditions.checkState; import static org.hamcrest.Matchers.equalTo; /** Integration test for performing rescale of unaligned checkpoint. 
*/ @RunWith(Parameterized.class) public class UnalignedCheckpointRescaleITCase extends UnalignedCheckpointTestBase { public static final int NUM_GROUPS = 100; private final Topology topology; private final int oldParallelism; private final int newParallelism; private final long sourceSleepMs; enum Topology implements DagCreator { PIPELINE { @Override public void create( StreamExecutionEnvironment env, int minCheckpoints, boolean slotSharing, int expectedRestarts, long sourceSleepMillis) { final int parallelism = env.getParallelism(); final DataStream<Long> source = createSourcePipeline( env, minCheckpoints, slotSharing, expectedRestarts, parallelism, 0, sourceSleepMillis, val -> true); addFailingSink(source, minCheckpoints, slotSharing); } }, MULTI_INPUT { @Override public void create( StreamExecutionEnvironment env, int minCheckpoints, boolean slotSharing, int expectedRestarts, long sourceSleepMs) { final int parallelism = env.getParallelism(); DataStream<Long> combinedSource = null; for (int inputIndex = 0; inputIndex < NUM_SOURCES; inputIndex++) { int finalInputIndex = inputIndex; final DataStream<Long> source = createSourcePipeline( env, minCheckpoints, slotSharing, expectedRestarts, parallelism, inputIndex, sourceSleepMs, val -> withoutHeader(val) % NUM_SOURCES == finalInputIndex); combinedSource = combinedSource == null ? source : combinedSource .connect(source) .map(new UnionLikeCoGroup()) .name("min" + inputIndex) .uid("min" + inputIndex) .slotSharingGroup( slotSharing ? 
"default" : ("min" + inputIndex)); } addFailingSink(combinedSource, minCheckpoints, slotSharing); } }, KEYED_DIFFERENT_PARALLELISM { @Override public void create( StreamExecutionEnvironment env, int minCheckpoints, boolean slotSharing, int expectedRestarts, long sourceSleepMs) { final int parallelism = env.getParallelism(); final DataStream<Long> source1 = createSourcePipeline( env, minCheckpoints, slotSharing, expectedRestarts, parallelism / 2, 0, sourceSleepMs, val -> withoutHeader(val) % 2 == 0); final DataStream<Long> source2 = createSourcePipeline( env, minCheckpoints, slotSharing, expectedRestarts, parallelism / 3, 1, sourceSleepMs, val -> withoutHeader(val) % 2 == 1); KeySelector<Long, Long> keySelector = i -> withoutHeader(i) % NUM_GROUPS; SingleOutputStreamOperator<Long> connected = source1.connect(source2) .keyBy(keySelector, keySelector) .process(new TestKeyedCoProcessFunction()) .setParallelism(parallelism); addFailingSink(connected, minCheckpoints, slotSharing); } }, UNION { @Override public void create( StreamExecutionEnvironment env, int minCheckpoints, boolean slotSharing, int expectedRestarts, long sourceSleepMs) { final int parallelism = env.getParallelism(); DataStream<Long> combinedSource = null; for (int inputIndex = 0; inputIndex < NUM_SOURCES; inputIndex++) { int finalInputIndex = inputIndex; final DataStream<Long> source = createSourcePipeline( env, minCheckpoints, slotSharing, expectedRestarts, parallelism, inputIndex, sourceSleepMs, val -> withoutHeader(val) % NUM_SOURCES == finalInputIndex); combinedSource = combinedSource == null ? 
source : combinedSource.union(source); } addFailingSink(combinedSource, minCheckpoints, slotSharing); } }, BROADCAST { @Override public void create( StreamExecutionEnvironment env, int minCheckpoints, boolean slotSharing, int expectedRestarts, long sourceSleepMs) { final int parallelism = env.getParallelism(); final DataStream<Long> broadcastSide = env.fromSource( new LongSource( minCheckpoints, parallelism, expectedRestarts, env.getCheckpointInterval(), sourceSleepMs), noWatermarks(), "source"); final DataStream<Long> source = createSourcePipeline( env, minCheckpoints, slotSharing, expectedRestarts, parallelism, 0, sourceSleepMs, val -> true) .map(i -> checkHeader(i)) .name("map") .uid("map") .slotSharingGroup(slotSharing ? "default" : "failing-map"); final MapStateDescriptor<Long, Long> descriptor = new MapStateDescriptor<>( "broadcast", BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO); final BroadcastStream<Long> broadcast = broadcastSide.broadcast(descriptor); final SingleOutputStreamOperator<Long> joined = source.connect(broadcast) .process(new TestBroadcastProcessFunction()) .setParallelism(2 * parallelism); addFailingSink(joined, minCheckpoints, slotSharing); } }, KEYED_BROADCAST { @Override public void create( StreamExecutionEnvironment env, int minCheckpoints, boolean slotSharing, int expectedRestarts, long sourceSleepMs) { final int parallelism = env.getParallelism(); final DataStream<Long> broadcastSide1 = env.fromSource( new LongSource( minCheckpoints, 1, expectedRestarts, env.getCheckpointInterval(), sourceSleepMs), noWatermarks(), "source-1") .setParallelism(1); final DataStream<Long> broadcastSide2 = env.fromSource( new LongSource( minCheckpoints, 1, expectedRestarts, env.getCheckpointInterval(), sourceSleepMs), noWatermarks(), "source-2") .setParallelism(1); final DataStream<Long> broadcastSide3 = env.fromSource( new LongSource( minCheckpoints, 1, expectedRestarts, env.getCheckpointInterval(), sourceSleepMs), noWatermarks(), "source-3") 
.setParallelism(1); final DataStream<Long> source = createSourcePipeline( env, minCheckpoints, slotSharing, expectedRestarts, parallelism, 0, sourceSleepMs, val -> true) .map(i -> checkHeader(i)) .name("map") .uid("map") .slotSharingGroup(slotSharing ? "default" : "failing-map"); final MapStateDescriptor<Long, Long> descriptor = new MapStateDescriptor<>( "broadcast", BasicTypeInfo.LONG_TYPE_INFO, BasicTypeInfo.LONG_TYPE_INFO); DataStream<Long> broadcastSide = broadcastSide1.union(broadcastSide2).union(broadcastSide3); final BroadcastStream<Long> broadcast = broadcastSide.broadcast(descriptor); final SingleOutputStreamOperator<Long> joined = source.keyBy(i -> withoutHeader(i) % NUM_GROUPS) .connect(broadcast) .process(new TestKeyedBroadcastProcessFunction()) .setParallelism(parallelism + 2); addFailingSink(joined, minCheckpoints, slotSharing); } }, CUSTOM_PARTITIONER { final int sinkParallelism = 3; @Override public void create( StreamExecutionEnvironment env, int minCheckpoints, boolean slotSharing, int expectedRestarts, long sourceSleepMs) { int parallelism = env.getParallelism(); env.fromSource( new LongSource( minCheckpoints, parallelism, expectedRestarts, env.getCheckpointInterval(), sourceSleepMs), noWatermarks(), "source") .name("source") .uid("source") .map( new MapFunction<Long, String>() { @Override public String map(Long value) throws Exception { value = withoutHeader(value); return buildString( value % sinkParallelism, value / sinkParallelism); } }) .name("long-to-string-map") .uid("long-to-string-map") .map( new FailingMapper<>( state -> false, state -> state.completedCheckpoints >= minCheckpoints / 2 && state.runNumber == 0, state -> false, state -> false)) .name("failing-map") .uid("failing-map") .setParallelism(parallelism) .partitionCustom(new StringPartitioner(), str -> str.split(" ")[0]) .addSink(new BackPressureInducingSink()) .name("sink") .uid("sink") .setParallelism(sinkParallelism); } private String buildString(long partition, long index) { 
String longStr = new String(new char[3713]).replace('\0', '\uFFFF'); return partition + " " + index + " " + longStr; } }; void addFailingSink( DataStream<Long> combinedSource, long minCheckpoints, boolean slotSharing) { combinedSource .shuffle() .map( new FailingMapper<>( state -> false, state -> state.completedCheckpoints >= minCheckpoints / 2 && state.runNumber == 0, state -> false, state -> false)) .name("failing-map") .uid("failing-map") .slotSharingGroup(slotSharing ? "default" : "failing-map") .shuffle() .addSink( new VerifyingSink( minCheckpoints, combinedSource .getExecutionEnvironment() .getCheckpointInterval())) .setParallelism(1) .name("sink") .uid("sink") .slotSharingGroup(slotSharing ? "default" : "sink"); } DataStream<Long> createSourcePipeline( StreamExecutionEnvironment env, int minCheckpoints, boolean slotSharing, int expectedRestarts, int parallelism, int inputIndex, long sourceSleepMs, FilterFunction<Long> sourceFilter) { return env.fromSource( new LongSource( minCheckpoints, parallelism, expectedRestarts, env.getCheckpointInterval(), sourceSleepMs), noWatermarks(), "source" + inputIndex) .uid("source" + inputIndex) .slotSharingGroup(slotSharing ? "default" : ("source" + inputIndex)) .filter(sourceFilter) .name("input-filter" + inputIndex) .uid("input-filter" + inputIndex) .slotSharingGroup(slotSharing ? "default" : ("source" + inputIndex)) .map(new InputCountFunction()) .name("input-counter" + inputIndex) .uid("input-counter" + inputIndex) .slotSharingGroup(slotSharing ? "default" : ("source" + inputIndex)) .global() .map(i -> checkHeader(i)) .name("global" + inputIndex) .uid("global" + inputIndex) .slotSharingGroup(slotSharing ? "default" : ("global" + inputIndex)) .rebalance() .map(i -> checkHeader(i)) .setParallelism(parallelism + 1) .name("rebalance" + inputIndex) .uid("rebalance" + inputIndex) .slotSharingGroup(slotSharing ? 
"default" : ("rebalance" + inputIndex)) .shuffle() .map(i -> checkHeader(i)) .name("upscale" + inputIndex) .uid("upscale" + inputIndex) .setParallelism(2 * parallelism) .slotSharingGroup(slotSharing ? "default" : ("upscale" + inputIndex)) .shuffle() .map(i -> checkHeader(i)) .name("downscale" + inputIndex) .uid("downscale" + inputIndex) .setParallelism(parallelism + 1) .slotSharingGroup(slotSharing ? "default" : ("downscale" + inputIndex)) .keyBy(i -> withoutHeader(i) % NUM_GROUPS) .map(new StatefulKeyedMap()) .name("keyby" + inputIndex) .uid("keyby" + inputIndex) .slotSharingGroup(slotSharing ? "default" : ("keyby" + inputIndex)) .rescale() .map(i -> checkHeader(i)) .name("rescale" + inputIndex) .uid("rescale" + inputIndex) .setParallelism(Math.max(parallelism + 1, parallelism * 3 / 2)) .slotSharingGroup(slotSharing ? "default" : ("rescale" + inputIndex)); } @Override public String toString() { return name().toLowerCase(); } private static class TestBroadcastProcessFunction extends BroadcastProcessFunction<Long, Long, Long> { private static final long serialVersionUID = 7852973507735751404L; TestBroadcastProcessFunction() {} @Override public void processElement(Long value, ReadOnlyContext ctx, Collector<Long> out) { out.collect(checkHeader(value)); } @Override public void processBroadcastElement(Long value, Context ctx, Collector<Long> out) {} } private static class TestKeyedCoProcessFunction extends KeyedCoProcessFunction<Long, Long, Long, Long> { private static final long serialVersionUID = 1L; TestKeyedCoProcessFunction() {} @Override public void processElement1(Long value, Context ctx, Collector<Long> out) throws Exception { out.collect(checkHeader(value)); } @Override public void processElement2(Long value, Context ctx, Collector<Long> out) throws Exception { out.collect(checkHeader(value)); } } private static class TestKeyedBroadcastProcessFunction extends KeyedBroadcastProcessFunction<Long, Long, Long, Long> { private static final long serialVersionUID = 
7852973507735751404L; TestKeyedBroadcastProcessFunction() {} @Override public void processElement(Long value, ReadOnlyContext ctx, Collector<Long> out) { out.collect(checkHeader(value)); } @Override public void processBroadcastElement(Long value, Context ctx, Collector<Long> out) {} } } @Parameterized.Parameters(name = "{0} {1} from {2} to {3}, sourceSleepMs = {4}") public static Object[][] getScaleFactors() { // We use `sourceSleepMs` > 0 to test rescaling without backpressure and only very few // captured in-flight records, see FLINK-31963. Object[][] parameters = new Object[][] { new Object[] {"downscale", Topology.CUSTOM_PARTITIONER, 3, 2, 0L}, new Object[] {"downscale", Topology.KEYED_DIFFERENT_PARALLELISM, 12, 7, 0L}, new Object[] {"upscale", Topology.KEYED_DIFFERENT_PARALLELISM, 7, 12, 0L}, new Object[] {"downscale", Topology.KEYED_DIFFERENT_PARALLELISM, 5, 3, 5L}, new Object[] {"upscale", Topology.KEYED_DIFFERENT_PARALLELISM, 3, 5, 5L}, new Object[] {"downscale", Topology.KEYED_BROADCAST, 7, 2, 0L}, new Object[] {"upscale", Topology.KEYED_BROADCAST, 2, 7, 0L}, new Object[] {"downscale", Topology.KEYED_BROADCAST, 5, 3, 5L}, new Object[] {"upscale", Topology.KEYED_BROADCAST, 3, 5, 5L}, new Object[] {"downscale", Topology.BROADCAST, 5, 2, 0L}, new Object[] {"upscale", Topology.BROADCAST, 2, 5, 0L}, new Object[] {"downscale", Topology.BROADCAST, 5, 3, 5L}, new Object[] {"upscale", Topology.BROADCAST, 3, 5, 5L}, new Object[] {"upscale", Topology.PIPELINE, 1, 2, 0L}, new Object[] {"upscale", Topology.PIPELINE, 2, 3, 0L}, new Object[] {"upscale", Topology.PIPELINE, 3, 7, 0L}, new Object[] {"upscale", Topology.PIPELINE, 4, 8, 0L}, new Object[] {"upscale", Topology.PIPELINE, 20, 21, 0L}, new Object[] {"upscale", Topology.PIPELINE, 3, 5, 5L}, new Object[] {"downscale", Topology.PIPELINE, 2, 1, 0L}, new Object[] {"downscale", Topology.PIPELINE, 3, 2, 0L}, new Object[] {"downscale", Topology.PIPELINE, 7, 3, 0L}, new Object[] {"downscale", Topology.PIPELINE, 8, 4, 0L}, 
new Object[] {"downscale", Topology.PIPELINE, 21, 20, 0L}, new Object[] {"downscale", Topology.PIPELINE, 5, 3, 5L}, new Object[] {"no scale", Topology.PIPELINE, 1, 1, 0L}, new Object[] {"no scale", Topology.PIPELINE, 3, 3, 0L}, new Object[] {"no scale", Topology.PIPELINE, 7, 7, 0L}, new Object[] {"no scale", Topology.PIPELINE, 20, 20, 0L}, new Object[] {"upscale", Topology.UNION, 1, 2, 0L}, new Object[] {"upscale", Topology.UNION, 2, 3, 0L}, new Object[] {"upscale", Topology.UNION, 3, 7, 0L}, new Object[] {"upscale", Topology.UNION, 3, 5, 5L}, new Object[] {"downscale", Topology.UNION, 2, 1, 0L}, new Object[] {"downscale", Topology.UNION, 3, 2, 0L}, new Object[] {"downscale", Topology.UNION, 7, 3, 0L}, new Object[] {"downscale", Topology.UNION, 5, 3, 5L}, new Object[] {"no scale", Topology.UNION, 1, 1, 0L}, new Object[] {"no scale", Topology.UNION, 7, 7, 0L}, new Object[] {"upscale", Topology.MULTI_INPUT, 1, 2, 0L}, new Object[] {"upscale", Topology.MULTI_INPUT, 2, 3, 0L}, new Object[] {"upscale", Topology.MULTI_INPUT, 3, 7, 0L}, new Object[] {"upscale", Topology.MULTI_INPUT, 3, 5, 5L}, new Object[] {"downscale", Topology.MULTI_INPUT, 2, 1, 0L}, new Object[] {"downscale", Topology.MULTI_INPUT, 3, 2, 0L}, new Object[] {"downscale", Topology.MULTI_INPUT, 7, 3, 0L}, new Object[] {"downscale", Topology.MULTI_INPUT, 5, 3, 5L}, new Object[] {"no scale", Topology.MULTI_INPUT, 1, 1, 0L}, new Object[] {"no scale", Topology.MULTI_INPUT, 7, 7, 0L}, }; return Arrays.stream(parameters) .map(params -> new Object[][] {ArrayUtils.insert(params.length, params)}) .flatMap(Arrays::stream) .toArray(Object[][]::new); } public UnalignedCheckpointRescaleITCase( String desc, Topology topology, int oldParallelism, int newParallelism, long sourceSleepMs) { this.topology = topology; this.oldParallelism = oldParallelism; this.newParallelism = newParallelism; this.sourceSleepMs = sourceSleepMs; } @Test public void shouldRescaleUnalignedCheckpoint() throws Exception { final UnalignedSettings 
prescaleSettings = new UnalignedSettings(topology) .setParallelism(oldParallelism) .setExpectedFailures(1) .setSourceSleepMs(sourceSleepMs); prescaleSettings.setGenerateCheckpoint(true); final File checkpointDir = super.execute(prescaleSettings); // resume final UnalignedSettings postscaleSettings = new UnalignedSettings(topology) .setParallelism(newParallelism) .setExpectedFailures(1); postscaleSettings.setRestoreCheckpoint(checkpointDir); super.execute(postscaleSettings); } protected void checkCounters(JobExecutionResult result) { collector.checkThat( "NUM_OUTPUTS = NUM_INPUTS", result.<Long>getAccumulatorResult(NUM_OUTPUTS), equalTo(result.getAccumulatorResult(NUM_INPUTS))); if (!topology.equals(Topology.CUSTOM_PARTITIONER)) { collector.checkThat( "NUM_DUPLICATES", result.<Long>getAccumulatorResult(NUM_DUPLICATES), equalTo(0L)); } } /** * A sink that checks if the members arrive in the expected order without any missing values. */ protected static class VerifyingSink extends VerifyingSinkBase<VerifyingSink.State> { private boolean firstDuplicate = true; protected VerifyingSink(long minCheckpoints, long checkpointingInterval) { super(minCheckpoints, checkpointingInterval); } @Override protected State createState() { return new State(); } @Override public void invoke(Long value, Context context) throws Exception { final int intValue = (int) withoutHeader(value); if (state.encounteredNumbers.get(intValue)) { state.numDuplicates++; if (firstDuplicate) { LOG.info( "Duplicate record {} @ {} subtask ({} attempt)", intValue, getRuntimeContext().getTaskInfo().getIndexOfThisSubtask(), getRuntimeContext().getTaskInfo().getAttemptNumber()); firstDuplicate = false; } } state.encounteredNumbers.set(intValue); state.numOutput++; induceBackpressure(); } @Override public void close() throws Exception { state.numLostValues = state.encounteredNumbers.length() - state.encounteredNumbers.cardinality(); super.close(); } public static class State extends VerifyingSinkStateBase { 
private final BitSet encounteredNumbers = new BitSet(); } } private static class StatefulKeyedMap extends RichMapFunction<Long, Long> { private static final ValueStateDescriptor<Long> DESC = new ValueStateDescriptor<>("group", LongSerializer.INSTANCE); ValueState<Long> state; @Override public void open(OpenContext openContext) throws Exception { state = getRuntimeContext().getState(DESC); } @Override public Long map(Long value) throws Exception { final Long lastGroup = state.value(); final long rawValue = withoutHeader(value); final long group = rawValue % NUM_GROUPS; if (lastGroup != null) { checkState(group == lastGroup, "Mismatched key group"); } else { state.update(group); } return value; } } private static class InputCountFunction extends RichMapFunction<Long, Long> implements CheckpointedFunction { private static final long serialVersionUID = -1098571965968341646L; private final LongCounter numInputCounter = new LongCounter(); private ListState<Long> state; @Override public void open(OpenContext openContext) throws Exception { super.open(openContext); getRuntimeContext().addAccumulator(NUM_INPUTS, numInputCounter); } @Override public void initializeState(FunctionInitializationContext context) throws Exception { ListStateDescriptor<Long> descriptor = new ListStateDescriptor<>("num-inputs", Types.LONG); state = context.getOperatorStateStore().getListState(descriptor); for (Long numInputs : state.get()) { numInputCounter.add(numInputs); } } @Override public void snapshotState(FunctionSnapshotContext context) throws Exception { state.update(Collections.singletonList(numInputCounter.getLocalValue())); } @Override public Long map(Long value) throws Exception { numInputCounter.add(1L); return checkHeader(value); } } private static class UnionLikeCoGroup implements CoMapFunction<Long, Long, Long> { @Override public Long map1(Long value) throws Exception { return checkHeader(value); } @Override public Long map2(Long value) throws Exception { return checkHeader(value); 
} } private static class StringPartitioner implements Partitioner<String> { @Override public int partition(String key, int numPartitions) { return Integer.parseInt(key) % numPartitions; } } private static class BackPressureInducingSink<T> implements SinkFunction<T> { @Override public void invoke(T value, Context ctx) throws Exception { // TODO: maybe similarly to VerifyingSink, we should back pressure only until some point // but currently it doesn't seem to be needed (test runs quickly enough) Thread.sleep(1); } } }
apache/iotdb
35,814
integration-test/src/test/java/org/apache/iotdb/db/it/schema/IoTDBSchemaTemplateIT.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.iotdb.db.it.schema; import org.apache.iotdb.commons.schema.column.ColumnHeaderConstant; import org.apache.iotdb.it.env.EnvFactory; import org.apache.iotdb.itbase.category.ClusterIT; import org.apache.iotdb.itbase.category.LocalStandaloneIT; import org.apache.iotdb.rpc.TSStatusCode; import org.apache.iotdb.util.AbstractSchemaIT; import org.junit.After; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.runners.Parameterized; import java.sql.Connection; import java.sql.ResultSet; import java.sql.SQLException; import java.sql.Statement; import java.util.Arrays; import java.util.Collections; import java.util.HashSet; import java.util.Set; import static org.junit.Assert.fail; /** * Notice that, all test begins with "IoTDB" is integration test. All test which will start the * IoTDB server should be defined as integration test. 
*/ @Category({LocalStandaloneIT.class, ClusterIT.class}) public class IoTDBSchemaTemplateIT extends AbstractSchemaIT { public IoTDBSchemaTemplateIT(SchemaTestMode schemaTestMode) { super(schemaTestMode); } @Parameterized.BeforeParam public static void before() throws Exception { setUpEnvironment(); EnvFactory.getEnv().initClusterEnvironment(); } @Parameterized.AfterParam public static void after() throws Exception { EnvFactory.getEnv().cleanClusterEnvironment(); tearDownEnvironment(); } @Before public void setUp() throws Exception { prepareTemplate(); } @After public void tearDown() throws Exception { clearSchema(); } private void prepareTemplate() throws SQLException { try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { // create database statement.execute("CREATE DATABASE root.sg1"); statement.execute("CREATE DATABASE root.sg2"); statement.execute("CREATE DATABASE root.sg3"); // create device template statement.execute("CREATE DEVICE TEMPLATE t1 (s1 INT64, s2 DOUBLE)"); statement.execute("CREATE DEVICE TEMPLATE t2 aligned (s1 INT64, s2 DOUBLE)"); statement.execute("CREATE DEVICE TEMPLATE t3 aligned (s1 INT64)"); } } @Test public void testCreateTemplateAndCreateTimeseries() throws SQLException { // test create device template repeatedly try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { // test datatype and encoding check try { statement.execute( "CREATE DEVICE TEMPLATE str1 (s1 TEXT encoding=GORILLA compressor=SNAPPY, s2 INT32)"); fail(); } catch (SQLException e) { System.out.println(e.getMessage()); Assert.assertEquals( TSStatusCode.CREATE_TEMPLATE_ERROR.getStatusCode() + ": create template error -encoding GORILLA does not support TEXT", e.getMessage()); } try { statement.execute( "CREATE DEVICE TEMPLATE t1 (s1 INT64 encoding=RLE compressor=SNAPPY, s2 INT32)"); fail(); } catch (SQLException e) { Assert.assertEquals( 
TSStatusCode.METADATA_ERROR.getStatusCode() + ": Duplicated template name: t1", e.getMessage()); } // set device template statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d1"); statement.execute("SET DEVICE TEMPLATE t2 TO root.sg1.d2"); statement.execute("SET DEVICE TEMPLATE t3 TO root.sg1.d3"); // test drop template which has been set try { statement.execute("DROP DEVICE TEMPLATE t1"); fail(); } catch (SQLException e) { Assert.assertEquals( TSStatusCode.METADATA_ERROR.getStatusCode() + ": Template [t1] has been set on MTree, cannot be dropped now.", e.getMessage()); } try (ResultSet resultSet = statement.executeQuery("SHOW TIMESERIES root.sg1.**")) { Assert.assertFalse(resultSet.next()); } // create timeseries of device template statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg1.d1"); statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg1.d2"); statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg1.d3"); Set<String> expectedResult = new HashSet<>( Arrays.asList( "root.sg1.d1.s1,INT64,TS_2DIFF,LZ4", "root.sg1.d1.s2,DOUBLE,GORILLA,LZ4", "root.sg1.d2.s1,INT64,TS_2DIFF,LZ4", "root.sg1.d2.s2,DOUBLE,GORILLA,LZ4", "root.sg1.d3.s1,INT64,TS_2DIFF,LZ4")); try (ResultSet resultSet = statement.executeQuery("SHOW TIMESERIES root.sg1.**"); ) { while (resultSet.next()) { String actualResult = resultSet.getString(ColumnHeaderConstant.TIMESERIES) + "," + resultSet.getString(ColumnHeaderConstant.DATATYPE) + "," + resultSet.getString(ColumnHeaderConstant.ENCODING) + "," + resultSet.getString(ColumnHeaderConstant.COMPRESSION); Assert.assertTrue(expectedResult.contains(actualResult)); expectedResult.remove(actualResult); } } Assert.assertTrue(expectedResult.isEmpty()); try (ResultSet resultSet = statement.executeQuery("COUNT TIMESERIES root.sg1.**")) { resultSet.next(); Assert.assertEquals(5, resultSet.getLong(1)); } expectedResult = new HashSet<>(Arrays.asList("root.sg1.d1,false", "root.sg1.d2,true", "root.sg1.d3,true")); try (ResultSet 
resultSet = statement.executeQuery("SHOW DEVICES root.sg1.**")) { while (resultSet.next()) { String actualResult = resultSet.getString(ColumnHeaderConstant.DEVICE) + "," + resultSet.getString(ColumnHeaderConstant.IS_ALIGNED); Assert.assertTrue(expectedResult.contains(actualResult)); expectedResult.remove(actualResult); } } Assert.assertTrue(expectedResult.isEmpty()); try { statement.execute("UNSET DEVICE TEMPLATE t1 FROM root.sg1.d1"); fail(); } catch (SQLException e) { Assert.assertEquals( TSStatusCode.TEMPLATE_IS_IN_USE.getStatusCode() + ": Template is in use on root.sg1.d1", e.getMessage()); } } } @Test public void testCreateAndSetSchemaTemplate() throws SQLException { // test create device template repeatedly try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { try { statement.execute( "CREATE DEVICE TEMPLATE t1 (s1 INT64 encoding=RLE compressor=SNAPPY, s2 INT32)"); fail(); } catch (SQLException e) { Assert.assertEquals( TSStatusCode.METADATA_ERROR.getStatusCode() + ": Duplicated template name: t1", e.getMessage()); } // set device template statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d1"); statement.execute("SET DEVICE TEMPLATE t2 TO root.sg1.d2"); try (ResultSet resultSet = statement.executeQuery("SHOW TIMESERIES root.sg1.**")) { Assert.assertFalse(resultSet.next()); } // set using device template statement.execute("INSERT INTO root.sg1.d1(time,s1) VALUES (1,1)"); statement.execute("INSERT INTO root.sg1.d2(time,s1) ALIGNED VALUES (1,1)"); Set<String> expectedResult = new HashSet<>( Arrays.asList( "root.sg1.d1.s1,INT64,TS_2DIFF,LZ4", "root.sg1.d1.s2,DOUBLE,GORILLA,LZ4", "root.sg1.d2.s1,INT64,TS_2DIFF,LZ4", "root.sg1.d2.s2,DOUBLE,GORILLA,LZ4")); try (ResultSet resultSet = statement.executeQuery("SHOW TIMESERIES root.sg1.**")) { while (resultSet.next()) { String actualResult = resultSet.getString(ColumnHeaderConstant.TIMESERIES) + "," + resultSet.getString(ColumnHeaderConstant.DATATYPE) 
+ "," + resultSet.getString(ColumnHeaderConstant.ENCODING) + "," + resultSet.getString(ColumnHeaderConstant.COMPRESSION); Assert.assertTrue(expectedResult.contains(actualResult)); expectedResult.remove(actualResult); } } Assert.assertTrue(expectedResult.isEmpty()); try (ResultSet resultSet = statement.executeQuery("COUNT TIMESERIES root.sg1.**")) { resultSet.next(); Assert.assertEquals(4, resultSet.getLong(1)); } expectedResult = new HashSet<>(Arrays.asList("root.sg1.d1,false", "root.sg1.d2,true")); try (ResultSet resultSet = statement.executeQuery("SHOW DEVICES root.sg1.**")) { while (resultSet.next()) { String actualResult = resultSet.getString(ColumnHeaderConstant.DEVICE) + "," + resultSet.getString(ColumnHeaderConstant.IS_ALIGNED); Assert.assertTrue(expectedResult.contains(actualResult)); expectedResult.remove(actualResult); } } Assert.assertTrue(expectedResult.isEmpty()); try { statement.execute("UNSET DEVICE TEMPLATE t1 FROM root.sg1.d1"); fail(); } catch (SQLException e) { Assert.assertEquals( TSStatusCode.TEMPLATE_IS_IN_USE.getStatusCode() + ": Template is in use on root.sg1.d1", e.getMessage()); } } } @Test public void testDropAndShowSchemaTemplates() throws SQLException { // show device templates String[] expectedResult = new String[] {"t1", "t2", "t3"}; Set<String> expectedResultSet = new HashSet<>(Arrays.asList(expectedResult)); try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { try (ResultSet resultSet = statement.executeQuery("SHOW DEVICE TEMPLATES")) { while (resultSet.next()) { Assert.assertTrue( expectedResultSet.contains(resultSet.getString(ColumnHeaderConstant.TEMPLATE_NAME))); expectedResultSet.remove(resultSet.getString(ColumnHeaderConstant.TEMPLATE_NAME)); } } Assert.assertEquals(0, expectedResultSet.size()); // drop device template statement.execute("DROP DEVICE TEMPLATE t2"); expectedResult = new String[] {"t1", "t3"}; expectedResultSet = new 
HashSet<>(Arrays.asList(expectedResult)); try (ResultSet resultSet = statement.executeQuery("SHOW DEVICE TEMPLATES")) { while (resultSet.next()) { Assert.assertTrue( expectedResultSet.contains(resultSet.getString(ColumnHeaderConstant.TEMPLATE_NAME))); expectedResultSet.remove(resultSet.getString(ColumnHeaderConstant.TEMPLATE_NAME)); } } Assert.assertEquals(0, expectedResultSet.size()); } } @Test public void testShowNodesInSchemaTemplate() throws SQLException { // set device template Set<String> expectedResultSet = new HashSet<>(Arrays.asList("s1,INT64,TS_2DIFF,LZ4", "s2,DOUBLE,GORILLA,LZ4")); try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement(); ResultSet resultSet = statement.executeQuery("SHOW NODES IN DEVICE TEMPLATE t1")) { while (resultSet.next()) { String actualResult = resultSet.getString(ColumnHeaderConstant.CHILD_NODES) + "," + resultSet.getString(ColumnHeaderConstant.DATATYPE) + "," + resultSet.getString(ColumnHeaderConstant.ENCODING) + "," + resultSet.getString(ColumnHeaderConstant.COMPRESSION); Assert.assertTrue(expectedResultSet.contains(actualResult)); expectedResultSet.remove(actualResult); } } Assert.assertEquals(0, expectedResultSet.size()); } @Test public void testShowPathsSetOrUsingSchemaTemplate() throws SQLException { try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { // set device template statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d1"); statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d2"); statement.execute("SET DEVICE TEMPLATE t1 TO root.sg2.d1"); statement.execute("SET DEVICE TEMPLATE t1 TO root.sg2.d2"); statement.execute("SET DEVICE TEMPLATE t2 TO root.sg3.d1"); statement.execute("SET DEVICE TEMPLATE t2 TO root.sg3.d2"); statement.execute("INSERT INTO root.sg3.d2.verify(time, show) ALIGNED VALUES (1, 1)"); try (ResultSet resultSet = statement.executeQuery("SHOW PATHS USING DEVICE TEMPLATE 
t1")) { String resultRecord; while (resultSet.next()) { resultRecord = resultSet.getString(1); Assert.assertEquals("", resultRecord); } } // activate device template statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg1.d2"); statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg2.d1"); // show paths set device template String[] expectedResult = new String[] {"root.sg1.d1", "root.sg2.d2", "root.sg1.d2", "root.sg2.d1"}; Set<String> expectedResultSet = new HashSet<>(Arrays.asList(expectedResult)); try (ResultSet resultSet = statement.executeQuery("SHOW PATHS SET DEVICE TEMPLATE t1")) { String resultRecord; while (resultSet.next()) { resultRecord = resultSet.getString(1); Assert.assertTrue(expectedResultSet.contains(resultRecord)); expectedResultSet.remove(resultRecord); } } Assert.assertEquals(0, expectedResultSet.size()); expectedResult = new String[] {"root.sg3.d1", "root.sg3.d2"}; expectedResultSet = new HashSet<>(Arrays.asList(expectedResult)); try (ResultSet resultSet = statement.executeQuery("SHOW PATHS SET DEVICE TEMPLATE t2")) { String resultRecord; while (resultSet.next()) { resultRecord = resultSet.getString(1); Assert.assertTrue(expectedResultSet.contains(resultRecord)); expectedResultSet.remove(resultRecord); } } Assert.assertEquals(0, expectedResultSet.size()); expectedResult = new String[] {"root.sg1.d2", "root.sg2.d1"}; expectedResultSet = new HashSet<>(Arrays.asList(expectedResult)); try (ResultSet resultSet = statement.executeQuery("SHOW PATHS USING DEVICE TEMPLATE t1")) { String resultRecord; while (resultSet.next()) { resultRecord = resultSet.getString(1); Assert.assertTrue(expectedResultSet.contains(resultRecord)); expectedResultSet.remove(resultRecord); } } Assert.assertEquals(0, expectedResultSet.size()); ResultSet resultSet = statement.executeQuery("SHOW PATHS USING DEVICE TEMPLATE t2"); Assert.assertTrue(resultSet.next()); } } @Test public void testSetAndActivateTemplateOnSGNode() throws SQLException { try (Connection 
connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { statement.execute("CREATE DATABASE root.test.sg_satosg"); statement.execute("SET DEVICE TEMPLATE t1 TO root.test.sg_satosg"); statement.execute("INSERT INTO root.test.sg_satosg(time, s1) VALUES (1, 1)"); statement.execute("INSERT INTO root.test.sg_satosg(time, s1) VALUES (2, 2)"); ResultSet resultSet = statement.executeQuery("SHOW TIMESERIES root.test.sg_satosg.**"); Set<String> expRes = new HashSet<>( Arrays.asList(new String[] {"root.test.sg_satosg.s1", "root.test.sg_satosg.s2"})); int resCnt = 0; while (resultSet.next()) { resCnt++; expRes.remove(resultSet.getString(ColumnHeaderConstant.TIMESERIES)); } Assert.assertEquals(2, resCnt); Assert.assertTrue(expRes.isEmpty()); resultSet = statement.executeQuery("SELECT COUNT(s1) from root.test.sg_satosg"); while (resultSet.next()) { Assert.assertEquals(2L, resultSet.getLong("COUNT(root.test.sg_satosg.s1)")); } } } @Test public void testDeleteTimeSeriesWhenUsingTemplate() throws SQLException { try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { // set device template statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d1"); statement.execute("SET DEVICE TEMPLATE t2 TO root.sg1.d2"); statement.execute("CREATE TIMESERIES root.sg3.d1.s1 INT64"); // set using device template statement.execute("INSERT INTO root.sg1.d1(time,s1) VALUES (1,1)"); statement.execute("INSERT INTO root.sg1.d2(time,s1) ALIGNED VALUES (1,1)"); statement.execute("INSERT INTO root.sg3.d1(time,s1) VALUES (1,1)"); Set<String> expectedResult = new HashSet<>(Collections.singletonList("1,1,1,1,")); try (ResultSet resultSet = statement.executeQuery("SELECT s1 FROM root.**")) { while (resultSet.next()) { StringBuilder stringBuilder = new StringBuilder(); for (int i = 1; i <= 4; i++) { stringBuilder.append(resultSet.getString(i)).append(","); } String actualResult = 
stringBuilder.toString(); Assert.assertTrue(expectedResult.contains(actualResult)); expectedResult.remove(actualResult); } } Assert.assertTrue(expectedResult.isEmpty()); statement.execute("DELETE TIMESERIES root.**.s1"); expectedResult = new HashSet<>( Arrays.asList( "root.sg1.d1.s1,INT64,TS_2DIFF,LZ4", "root.sg1.d2.s1,INT64,TS_2DIFF,LZ4")); try (ResultSet resultSet = statement.executeQuery("SHOW TIMESERIES root.**.s1")) { while (resultSet.next()) { String actualResult = resultSet.getString(ColumnHeaderConstant.TIMESERIES) + "," + resultSet.getString(ColumnHeaderConstant.DATATYPE) + "," + resultSet.getString(ColumnHeaderConstant.ENCODING) + "," + resultSet.getString(ColumnHeaderConstant.COMPRESSION); Assert.assertTrue(expectedResult.contains(actualResult)); expectedResult.remove(actualResult); } } Assert.assertTrue(expectedResult.isEmpty()); try (ResultSet resultSet = statement.executeQuery("SELECT s1 FROM root.**")) { Assert.assertFalse(resultSet.next()); } } } @Test public void testSchemaQueryAndFetchWithUnrelatedTemplate() throws SQLException { try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { statement.execute("CREATE DEVICE TEMPLATE t4 (s3 INT64, s4 DOUBLE)"); // set device template statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d1"); statement.execute("SET DEVICE TEMPLATE t4 TO root.sg1.d2"); // set using device template statement.execute("INSERT INTO root.sg1.d1(time,s1) VALUES (1,1)"); statement.execute("INSERT INTO root.sg1.d2(time,s3) VALUES (1,1)"); Set<String> expectedResult = new HashSet<>(Collections.singletonList("1,1,")); try (ResultSet resultSet = statement.executeQuery("SELECT s1 FROM root.**")) { while (resultSet.next()) { StringBuilder stringBuilder = new StringBuilder(); for (int i = 1; i <= 2; i++) { stringBuilder.append(resultSet.getString(i)).append(","); } String actualResult = stringBuilder.toString(); Assert.assertTrue(expectedResult.contains(actualResult)); 
expectedResult.remove(actualResult); } } Assert.assertTrue(expectedResult.isEmpty()); expectedResult = new HashSet<>(Collections.singletonList("root.sg1.d1.s1,INT64,TS_2DIFF,LZ4")); try (ResultSet resultSet = statement.executeQuery("SHOW TIMESERIES root.**.s1")) { while (resultSet.next()) { String actualResult = resultSet.getString(ColumnHeaderConstant.TIMESERIES) + "," + resultSet.getString(ColumnHeaderConstant.DATATYPE) + "," + resultSet.getString(ColumnHeaderConstant.ENCODING) + "," + resultSet.getString(ColumnHeaderConstant.COMPRESSION); Assert.assertTrue(expectedResult.contains(actualResult)); expectedResult.remove(actualResult); } } Assert.assertTrue(expectedResult.isEmpty()); } } @Test public void testInsertDataWithMeasurementsBeyondTemplate() throws Exception { try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { // set device template statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d1"); // insert data and auto activate device template statement.execute("INSERT INTO root.sg1.d1(time,s1,s2) VALUES (1,1,1)"); // insert twice to make sure the timeseries in template has been cached statement.execute("INSERT INTO root.sg1.d1(time,s1,s2) VALUES (2,1,1)"); // insert data with extra measurement s3 which should be checked by schema fetch and auto // created statement.execute("INSERT INTO root.sg1.d1(time,s1,s2,s3) VALUES (2,1,1,1)"); try (ResultSet resultSet = statement.executeQuery("count timeseries root.sg1.**")) { Assert.assertTrue(resultSet.next()); long resultRecord = resultSet.getLong(1); Assert.assertEquals(3, resultRecord); } } } @Test public void testUnsetTemplate() throws SQLException { try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { // set device template statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d1"); // show paths set device template String[] expectedResult = new String[] {"root.sg1.d1"}; 
Set<String> expectedResultSet = new HashSet<>(Arrays.asList(expectedResult)); try (ResultSet resultSet = statement.executeQuery("SHOW PATHS SET DEVICE TEMPLATE t1")) { String resultRecord; while (resultSet.next()) { resultRecord = resultSet.getString(1); Assert.assertTrue(expectedResultSet.contains(resultRecord)); expectedResultSet.remove(resultRecord); } } Assert.assertEquals(0, expectedResultSet.size()); // unset device template statement.execute("UNSET DEVICE TEMPLATE t1 FROM root.sg1.d1"); try (ResultSet resultSet = statement.executeQuery("SHOW PATHS SET DEVICE TEMPLATE t1")) { Assert.assertFalse(resultSet.next()); } } } @Test public void testTemplateSetAndTimeSeriesExistenceCheck() throws SQLException { try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { // set device template statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d1"); // show paths set device template String[] expectedResult = new String[] {"root.sg1.d1"}; Set<String> expectedResultSet = new HashSet<>(Arrays.asList(expectedResult)); try (ResultSet resultSet = statement.executeQuery("SHOW PATHS SET DEVICE TEMPLATE t1")) { String resultRecord; while (resultSet.next()) { resultRecord = resultSet.getString(1); Assert.assertTrue(expectedResultSet.contains(resultRecord)); expectedResultSet.remove(resultRecord); } } Assert.assertEquals(0, expectedResultSet.size()); try { statement.execute("CREATE TIMESERIES root.sg1.d1.s INT32"); fail(); } catch (SQLException e) { Assert.assertEquals( "516: Cannot create timeseries [root.sg1.d1.s] since device template [t1] already set on path [root.sg1.d1].", e.getMessage()); } // unset device template statement.execute("UNSET DEVICE TEMPLATE t1 FROM root.sg1.d1"); try (ResultSet resultSet = statement.executeQuery("SHOW PATHS SET DEVICE TEMPLATE t1")) { Assert.assertFalse(resultSet.next()); } statement.execute("CREATE TIMESERIES root.sg1.d1.s INT32"); try { statement.execute("SET DEVICE TEMPLATE t1 
TO root.sg1.d1"); } catch (SQLException e) { Assert.assertEquals( "516: Cannot set device template [t1] to path [root.sg1.d1] since there's timeseries under path [root.sg1.d1].", e.getMessage()); } statement.execute("DELETE TIMESERIES root.sg1.d1.s"); statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d1"); expectedResult = new String[] {"root.sg1.d1"}; expectedResultSet = new HashSet<>(Arrays.asList(expectedResult)); try (ResultSet resultSet = statement.executeQuery("SHOW PATHS SET DEVICE TEMPLATE t1")) { String resultRecord; while (resultSet.next()) { resultRecord = resultSet.getString(1); Assert.assertTrue(expectedResultSet.contains(resultRecord)); expectedResultSet.remove(resultRecord); } } Assert.assertEquals(0, expectedResultSet.size()); statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d2.tmp.m"); try { statement.execute("CREATE TIMESERIES root.sg1.d2 INT32"); } catch (SQLException e) { Assert.assertEquals( "516: Cannot create timeseries [root.sg1.d2] since device template [t1] already set on path [root.sg1.d2.tmp.m].", e.getMessage()); } try { statement.execute("CREATE TIMESERIES root.sg1.d2.s(tmp) INT32"); } catch (SQLException e) { Assert.assertEquals( "516: Cannot create timeseries [root.sg1.d2.s] since device template [t1] already set on path [root.sg1.d2.tmp.m].", e.getMessage()); } statement.execute("CREATE TIMESERIES root.sg1.d2.s INT32"); } } @Test public void testShowTemplateSeriesWithFuzzyQuery() throws Exception { // test create device template repeatedly try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { // set device template statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1"); statement.execute("SET DEVICE TEMPLATE t2 TO root.sg2"); statement.execute("SET DEVICE TEMPLATE t3 TO root.sg3"); // activate device template statement.execute("create timeseries using device template on root.sg1.d1"); statement.execute("create timeseries using device template on 
root.sg2.d2"); statement.execute("create timeseries using device template on root.sg3.d3"); Set<String> expectedResult = new HashSet<>( Arrays.asList( "root.sg1.d1.s1,INT64,TS_2DIFF,LZ4", "root.sg1.d1.s2,DOUBLE,GORILLA,LZ4", "root.sg2.d2.s1,INT64,TS_2DIFF,LZ4", "root.sg2.d2.s2,DOUBLE,GORILLA,LZ4", "root.sg3.d3.s1,INT64,TS_2DIFF,LZ4")); try (ResultSet resultSet = statement.executeQuery("SHOW TIMESERIES root.sg*.*.s*")) { while (resultSet.next()) { String actualResult = resultSet.getString(ColumnHeaderConstant.TIMESERIES) + "," + resultSet.getString(ColumnHeaderConstant.DATATYPE) + "," + resultSet.getString(ColumnHeaderConstant.ENCODING) + "," + resultSet.getString(ColumnHeaderConstant.COMPRESSION); Assert.assertTrue(expectedResult.contains(actualResult)); expectedResult.remove(actualResult); } } Assert.assertTrue(expectedResult.isEmpty()); expectedResult = new HashSet<>( Arrays.asList( "root.sg1.d1.s1,INT64,TS_2DIFF,LZ4", "root.sg1.d1.s2,DOUBLE,GORILLA,LZ4")); try (ResultSet resultSet = statement.executeQuery("SHOW TIMESERIES root.sg1.d1.s*")) { while (resultSet.next()) { String actualResult = resultSet.getString(ColumnHeaderConstant.TIMESERIES) + "," + resultSet.getString(ColumnHeaderConstant.DATATYPE) + "," + resultSet.getString(ColumnHeaderConstant.ENCODING) + "," + resultSet.getString(ColumnHeaderConstant.COMPRESSION); Assert.assertTrue(expectedResult.contains(actualResult)); expectedResult.remove(actualResult); } } Assert.assertTrue(expectedResult.isEmpty()); } } @Test public void testEmptySchemaTemplate() throws Exception { try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { // create empty device template statement.execute("create device template e_t"); // set device template statement.execute("SET DEVICE TEMPLATE e_t TO root.sg1"); try (ResultSet resultSet = statement.executeQuery("show nodes in device template e_t")) { Assert.assertFalse(resultSet.next()); } try (ResultSet resultSet = 
statement.executeQuery("show paths set device template e_t")) { Assert.assertTrue(resultSet.next()); Assert.assertFalse(resultSet.next()); } statement.execute("alter device template e_t add(s1 int32)"); statement.execute("insert into root.sg1.d(time, s2, s3) values(1, 1, 1)"); Set<String> expectedResult = new HashSet<>( Arrays.asList( "root.sg1.d.s1,INT32,TS_2DIFF,LZ4", "root.sg1.d.s2,DOUBLE,GORILLA,LZ4", "root.sg1.d.s3,DOUBLE,GORILLA,LZ4")); try (ResultSet resultSet = statement.executeQuery("SHOW TIMESERIES root.sg*.*.s*")) { while (resultSet.next()) { String actualResult = resultSet.getString(ColumnHeaderConstant.TIMESERIES) + "," + resultSet.getString(ColumnHeaderConstant.DATATYPE) + "," + resultSet.getString(ColumnHeaderConstant.ENCODING) + "," + resultSet.getString(ColumnHeaderConstant.COMPRESSION); Assert.assertTrue(expectedResult.contains(actualResult)); expectedResult.remove(actualResult); } } Assert.assertTrue(expectedResult.isEmpty()); } } @Test public void testLevelCountWithTemplate() throws Exception { try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d1"); statement.execute("SET DEVICE TEMPLATE t2 TO root.sg1.d2"); statement.execute("SET DEVICE TEMPLATE t3 TO root.sg1.d3"); // create timeseries of device template statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg1.d1"); statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg1.d2"); statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg1.d3"); // count Set<String> expectedResult = new HashSet<>(Arrays.asList("root.sg1.d1,2", "root.sg1.d2,2", "root.sg1.d3,1")); try (ResultSet resultSet = statement.executeQuery("COUNT TIMESERIES root.sg1.** group by level=2")) { while (resultSet.next()) { String actualResult = resultSet.getString(ColumnHeaderConstant.COLUMN) + "," + resultSet.getString(ColumnHeaderConstant.COUNT_TIMESERIES); 
Assert.assertTrue(expectedResult.contains(actualResult)); expectedResult.remove(actualResult); } } Assert.assertTrue(expectedResult.isEmpty()); } } @Test public void testAlterTemplateTimeseries() throws Exception { try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { statement.execute("SET DEVICE TEMPLATE t1 TO root.sg1.d1;"); statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg1.d1;"); try { statement.execute( "ALTER timeseries root.sg1.d1.s1 UPSERT tags(s0_tag1=s0_tag1, s0_tag2=s0_tag2) attributes(s0_attr1=s0_attr1, s0_attr2=s0_attr2);"); Assert.fail("expect exception because the template timeseries does not support tag"); } catch (Exception e) { Assert.assertTrue( e.getMessage() .contains( "Cannot alter template timeseries [root.sg1.d1.s1] since device template [t1] already set on path [root.sg1.d1]")); } try { statement.execute("ALTER timeseries root.sg1.d1.s1 UPSERT ALIAS=s0Alias;"); Assert.fail("expect exception because the template timeseries does not support alias"); } catch (Exception e) { Assert.assertTrue( e.getMessage() .contains( "Cannot alter template timeseries [root.sg1.d1.s1] since device template [t1] already set on path [root.sg1.d1]")); } } } @Test public void testActivateAndDropEmptyTemplate() throws Exception { try (Connection connection = EnvFactory.getEnv().getConnection(); Statement statement = connection.createStatement()) { statement.execute("CREATE DEVICE TEMPLATE e_t;"); statement.execute("SET DEVICE TEMPLATE e_t TO root.sg1.t.d1;"); statement.execute("insert into root.sg1.t.d2(timestamp,s1) values(now(),false);"); statement.execute("CREATE TIMESERIES OF DEVICE TEMPLATE ON root.sg1.t.d1;"); try (ResultSet resultSet = statement.executeQuery("show nodes in device template e_t")) { Assert.assertFalse(resultSet.next()); } try (ResultSet resultSet = statement.executeQuery("show paths set device template e_t")) { Assert.assertTrue(resultSet.next()); 
Assert.assertFalse(resultSet.next()); } statement.execute("DEACTIVATE DEVICE TEMPLATE FROM root.sg1.t.d1;"); statement.execute("UNSET DEVICE TEMPLATE e_t FROM root.sg1.t.d1;"); try (ResultSet resultSet = statement.executeQuery("show paths set device template e_t")) { Assert.assertFalse(resultSet.next()); } } } }
google/grafika
36,016
app/src/main/java/com/android/grafika/TextureFromCameraActivity.java
/* * Copyright 2014 Google Inc. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.android.grafika; import android.graphics.SurfaceTexture; import android.hardware.Camera; import android.opengl.GLES20; import android.opengl.Matrix; import android.os.Bundle; import android.os.Handler; import android.os.Looper; import android.os.Message; import android.util.Log; import android.view.MotionEvent; import android.view.Surface; import android.view.SurfaceHolder; import android.view.SurfaceView; import android.widget.SeekBar; import android.widget.TextView; import android.app.Activity; import com.android.grafika.gles.Drawable2d; import com.android.grafika.gles.EglCore; import com.android.grafika.gles.GlUtil; import com.android.grafika.gles.Sprite2d; import com.android.grafika.gles.Texture2dProgram; import com.android.grafika.gles.WindowSurface; import java.io.IOException; import java.lang.ref.WeakReference; /** * Direct the Camera preview to a GLES texture and manipulate it. * <p> * We manage the Camera and GLES rendering from a dedicated thread. We don't animate anything, * so we don't need a Choreographer heartbeat -- just redraw when we get a new frame from the * camera or the user has caused a change in size or position. * <p> * The Camera needs to follow the activity pause/resume cycle so we don't keep it locked * while we're in the background. Also, for power reasons, we don't want to keep getting * frames when the screen is off. 
As noted in * http://source.android.com/devices/graphics/architecture.html#activity * the Surface lifecycle isn't quite the same as the activity's. We follow approach #1. * <p> * The tricky part about the lifecycle is that our SurfaceView's Surface can outlive the * Activity, and we can get surface callbacks while paused, so we need to keep track of it * in a static variable and be prepared for calls at odd times. * <p> * The zoom, size, and rotate values are determined by the values stored in the "seek bars" * (sliders). When the device is rotated, the Activity is paused and resumed, but the * controls retain their value, which is kind of nice. The position, set by touch, is lost * on rotation. * <p> * The UI updates go through a multi-stage process: * <ol> * <li> The user updates a slider. * <li> The new value is passed as a percent to the render thread. * <li> The render thread converts the percent to something concrete (e.g. size in pixels). * The rect geometry is updated. * <li> (For most things) The values computed by the render thread are sent back to the main * UI thread. * <li> (For most things) The UI thread updates some text views. * </ol> */ public class TextureFromCameraActivity extends Activity implements SurfaceHolder.Callback, SeekBar.OnSeekBarChangeListener { private static final String TAG = MainActivity.TAG; private static final int DEFAULT_ZOOM_PERCENT = 0; // 0-100 private static final int DEFAULT_SIZE_PERCENT = 50; // 0-100 private static final int DEFAULT_ROTATE_PERCENT = 0; // 0-100 // Requested values; actual may differ. private static final int REQ_CAMERA_WIDTH = 1280; private static final int REQ_CAMERA_HEIGHT = 720; private static final int REQ_CAMERA_FPS = 30; // The holder for our SurfaceView. The Surface can outlive the Activity (e.g. when // the screen is turned off and back on with the power button). // // This becomes non-null after the surfaceCreated() callback is called, and gets set // to null when surfaceDestroyed() is called. 
private static SurfaceHolder sSurfaceHolder; // Thread that handles rendering and controls the camera. Started in onResume(), // stopped in onPause(). private RenderThread mRenderThread; // Receives messages from renderer thread. private MainHandler mHandler; // User controls. private SeekBar mZoomBar; private SeekBar mSizeBar; private SeekBar mRotateBar; // These values are passed to us by the camera/render thread, and displayed in the UI. // We could also just peek at the values in the RenderThread object, but we'd need to // synchronize access carefully. private int mCameraPreviewWidth, mCameraPreviewHeight; private float mCameraPreviewFps; private int mRectWidth, mRectHeight; private int mZoomWidth, mZoomHeight; private int mRotateDeg; @Override protected void onCreate(Bundle savedInstanceState) { super.onCreate(savedInstanceState); setContentView(R.layout.activity_texture_from_camera); mHandler = new MainHandler(this); SurfaceView sv = (SurfaceView) findViewById(R.id.cameraOnTexture_surfaceView); SurfaceHolder sh = sv.getHolder(); sh.addCallback(this); mZoomBar = (SeekBar) findViewById(R.id.tfcZoom_seekbar); mSizeBar = (SeekBar) findViewById(R.id.tfcSize_seekbar); mRotateBar = (SeekBar) findViewById(R.id.tfcRotate_seekbar); mZoomBar.setProgress(DEFAULT_ZOOM_PERCENT); mSizeBar.setProgress(DEFAULT_SIZE_PERCENT); mRotateBar.setProgress(DEFAULT_ROTATE_PERCENT); mZoomBar.setOnSeekBarChangeListener(this); mSizeBar.setOnSeekBarChangeListener(this); mRotateBar.setOnSeekBarChangeListener(this); updateControls(); } @Override protected void onResume() { Log.d(TAG, "onResume BEGIN"); super.onResume(); if (!PermissionHelper.hasCameraPermission(this)) { PermissionHelper.requestCameraPermission(this, false); return; } mRenderThread = new RenderThread(mHandler); mRenderThread.setName("TexFromCam Render"); mRenderThread.start(); mRenderThread.waitUntilReady(); RenderHandler rh = mRenderThread.getHandler(); rh.sendZoomValue(mZoomBar.getProgress()); 
rh.sendSizeValue(mSizeBar.getProgress()); rh.sendRotateValue(mRotateBar.getProgress()); if (sSurfaceHolder != null) { Log.d(TAG, "Sending previous surface"); rh.sendSurfaceAvailable(sSurfaceHolder, false); } else { Log.d(TAG, "No previous surface"); } Log.d(TAG, "onResume END"); } @Override protected void onPause() { Log.d(TAG, "onPause BEGIN"); super.onPause(); if (mRenderThread == null) { return; } RenderHandler rh = mRenderThread.getHandler(); rh.sendShutdown(); try { mRenderThread.join(); } catch (InterruptedException ie) { // not expected throw new RuntimeException("join was interrupted", ie); } mRenderThread = null; Log.d(TAG, "onPause END"); } @Override // SurfaceHolder.Callback public void surfaceCreated(SurfaceHolder holder) { Log.d(TAG, "surfaceCreated holder=" + holder + " (static=" + sSurfaceHolder + ")"); if (sSurfaceHolder != null) { throw new RuntimeException("sSurfaceHolder is already set"); } sSurfaceHolder = holder; if (mRenderThread != null) { // Normal case -- render thread is running, tell it about the new surface. RenderHandler rh = mRenderThread.getHandler(); rh.sendSurfaceAvailable(holder, true); } else { // Sometimes see this on 4.4.x N5: power off, power on, unlock, with device in // landscape and a lock screen that requires portrait. The surface-created // message is showing up after onPause(). // // Chances are good that the surface will be destroyed before the activity is // unpaused, but we track it anyway. If the activity is un-paused and we start // the RenderThread, the SurfaceHolder will be passed in right after the thread // is created. 
Log.d(TAG, "render thread not running"); } } @Override // SurfaceHolder.Callback public void surfaceChanged(SurfaceHolder holder, int format, int width, int height) { Log.d(TAG, "surfaceChanged fmt=" + format + " size=" + width + "x" + height + " holder=" + holder); if (mRenderThread != null) { RenderHandler rh = mRenderThread.getHandler(); rh.sendSurfaceChanged(format, width, height); } else { Log.d(TAG, "Ignoring surfaceChanged"); return; } } @Override // SurfaceHolder.Callback public void surfaceDestroyed(SurfaceHolder holder) { // In theory we should tell the RenderThread that the surface has been destroyed. if (mRenderThread != null) { RenderHandler rh = mRenderThread.getHandler(); rh.sendSurfaceDestroyed(); } Log.d(TAG, "surfaceDestroyed holder=" + holder); sSurfaceHolder = null; } @Override // SeekBar.OnSeekBarChangeListener public void onProgressChanged(SeekBar seekBar, int progress, boolean fromUser) { if (mRenderThread == null) { // Could happen if we programmatically update the values after setting a listener // but before starting the thread. Also, easy to cause this by scrubbing the seek // bar with one finger then tapping "recents" with another. Log.w(TAG, "Ignoring onProgressChanged received w/o RT running"); return; } RenderHandler rh = mRenderThread.getHandler(); // "progress" ranges from 0 to 100 if (seekBar == mZoomBar) { //Log.v(TAG, "zoom: " + progress); rh.sendZoomValue(progress); } else if (seekBar == mSizeBar) { //Log.v(TAG, "size: " + progress); rh.sendSizeValue(progress); } else if (seekBar == mRotateBar) { //Log.v(TAG, "rotate: " + progress); rh.sendRotateValue(progress); } else { throw new RuntimeException("unknown seek bar"); } // If we're getting preview frames quickly enough we don't really need this, but // we don't want to have chunky-looking resize movement if the camera is slow. // OTOH, if we get the updates too quickly (60fps camera?), this could jam us // up and cause us to run behind. So use with caution. 
rh.sendRedraw(); } @Override // SeekBar.OnSeekBarChangeListener public void onStartTrackingTouch(SeekBar seekBar) {} @Override // SeekBar.OnSeekBarChangeListener public void onStopTrackingTouch(SeekBar seekBar) {} @Override /** * Handles any touch events that aren't grabbed by one of the controls. */ public boolean onTouchEvent(MotionEvent e) { float x = e.getX(); float y = e.getY(); switch (e.getAction()) { case MotionEvent.ACTION_MOVE: case MotionEvent.ACTION_DOWN: //Log.v(TAG, "onTouchEvent act=" + e.getAction() + " x=" + x + " y=" + y); if (mRenderThread != null) { RenderHandler rh = mRenderThread.getHandler(); rh.sendPosition((int) x, (int) y); // Forcing a redraw can cause sluggish-looking behavior if the touch // events arrive quickly. //rh.sendRedraw(); } break; default: break; } return true; } /** * Updates the current state of the controls. */ private void updateControls() { String str = getString(R.string.tfcCameraParams, mCameraPreviewWidth, mCameraPreviewHeight, mCameraPreviewFps); TextView tv = (TextView) findViewById(R.id.tfcCameraParams_text); tv.setText(str); str = getString(R.string.tfcRectSize, mRectWidth, mRectHeight); tv = (TextView) findViewById(R.id.tfcRectSize_text); tv.setText(str); str = getString(R.string.tfcZoomArea, mZoomWidth, mZoomHeight); tv = (TextView) findViewById(R.id.tfcZoomArea_text); tv.setText(str); } /** * Custom message handler for main UI thread. * <p> * Receives messages from the renderer thread with UI-related updates, like the camera * parameters (which we show in a text message on screen). 
*/ private static class MainHandler extends Handler { private static final int MSG_SEND_CAMERA_PARAMS0 = 0; private static final int MSG_SEND_CAMERA_PARAMS1 = 1; private static final int MSG_SEND_RECT_SIZE = 2; private static final int MSG_SEND_ZOOM_AREA = 3; private static final int MSG_SEND_ROTATE_DEG = 4; private WeakReference<TextureFromCameraActivity> mWeakActivity; public MainHandler(TextureFromCameraActivity activity) { mWeakActivity = new WeakReference<TextureFromCameraActivity>(activity); } /** * Sends the updated camera parameters to the main thread. * <p> * Call from render thread. */ public void sendCameraParams(int width, int height, float fps) { // The right way to do this is to bundle them up into an object. The lazy // way is to send two messages. sendMessage(obtainMessage(MSG_SEND_CAMERA_PARAMS0, width, height)); sendMessage(obtainMessage(MSG_SEND_CAMERA_PARAMS1, (int) (fps * 1000), 0)); } /** * Sends the updated rect size to the main thread. * <p> * Call from render thread. */ public void sendRectSize(int width, int height) { sendMessage(obtainMessage(MSG_SEND_RECT_SIZE, width, height)); } /** * Sends the updated zoom area to the main thread. * <p> * Call from render thread. */ public void sendZoomArea(int width, int height) { sendMessage(obtainMessage(MSG_SEND_ZOOM_AREA, width, height)); } /** * Sends the updated zoom area to the main thread. * <p> * Call from render thread. 
*/ public void sendRotateDeg(int rot) { sendMessage(obtainMessage(MSG_SEND_ROTATE_DEG, rot, 0)); } @Override public void handleMessage(Message msg) { TextureFromCameraActivity activity = mWeakActivity.get(); if (activity == null) { Log.d(TAG, "Got message for dead activity"); return; } switch (msg.what) { case MSG_SEND_CAMERA_PARAMS0: { activity.mCameraPreviewWidth = msg.arg1; activity.mCameraPreviewHeight = msg.arg2; break; } case MSG_SEND_CAMERA_PARAMS1: { activity.mCameraPreviewFps = msg.arg1 / 1000.0f; activity.updateControls(); break; } case MSG_SEND_RECT_SIZE: { activity.mRectWidth = msg.arg1; activity.mRectHeight = msg.arg2; activity.updateControls(); break; } case MSG_SEND_ZOOM_AREA: { activity.mZoomWidth = msg.arg1; activity.mZoomHeight = msg.arg2; activity.updateControls(); break; } case MSG_SEND_ROTATE_DEG: { activity.mRotateDeg = msg.arg1; activity.updateControls(); break; } default: throw new RuntimeException("Unknown message " + msg.what); } } } /** * Thread that handles all rendering and camera operations. */ private static class RenderThread extends Thread implements SurfaceTexture.OnFrameAvailableListener { // Object must be created on render thread to get correct Looper, but is used from // UI thread, so we need to declare it volatile to ensure the UI thread sees a fully // constructed object. private volatile RenderHandler mHandler; // Used to wait for the thread to start. private Object mStartLock = new Object(); private boolean mReady = false; private MainHandler mMainHandler; private Camera mCamera; private int mCameraPreviewWidth, mCameraPreviewHeight; private EglCore mEglCore; private WindowSurface mWindowSurface; private int mWindowSurfaceWidth; private int mWindowSurfaceHeight; // Receives the output from the camera preview. private SurfaceTexture mCameraTexture; // Orthographic projection matrix. 
private float[] mDisplayProjectionMatrix = new float[16]; private Texture2dProgram mTexProgram; private final ScaledDrawable2d mRectDrawable = new ScaledDrawable2d(Drawable2d.Prefab.RECTANGLE); private final Sprite2d mRect = new Sprite2d(mRectDrawable); private int mZoomPercent = DEFAULT_ZOOM_PERCENT; private int mSizePercent = DEFAULT_SIZE_PERCENT; private int mRotatePercent = DEFAULT_ROTATE_PERCENT; private float mPosX, mPosY; /** * Constructor. Pass in the MainHandler, which allows us to send stuff back to the * Activity. */ public RenderThread(MainHandler handler) { mMainHandler = handler; } /** * Thread entry point. */ @Override public void run() { Looper.prepare(); // We need to create the Handler before reporting ready. mHandler = new RenderHandler(this); synchronized (mStartLock) { mReady = true; mStartLock.notify(); // signal waitUntilReady() } // Prepare EGL and open the camera before we start handling messages. mEglCore = new EglCore(null, 0); openCamera(REQ_CAMERA_WIDTH, REQ_CAMERA_HEIGHT, REQ_CAMERA_FPS); Looper.loop(); Log.d(TAG, "looper quit"); releaseCamera(); releaseGl(); mEglCore.release(); synchronized (mStartLock) { mReady = false; } } /** * Waits until the render thread is ready to receive messages. * <p> * Call from the UI thread. */ public void waitUntilReady() { synchronized (mStartLock) { while (!mReady) { try { mStartLock.wait(); } catch (InterruptedException ie) { /* not expected */ } } } } /** * Shuts everything down. */ private void shutdown() { Log.d(TAG, "shutdown"); Looper.myLooper().quit(); } /** * Returns the render thread's Handler. This may be called from any thread. */ public RenderHandler getHandler() { return mHandler; } /** * Handles the surface-created callback from SurfaceView. Prepares GLES and the Surface. 
*/ private void surfaceAvailable(SurfaceHolder holder, boolean newSurface) { Surface surface = holder.getSurface(); mWindowSurface = new WindowSurface(mEglCore, surface, false); mWindowSurface.makeCurrent(); // Create and configure the SurfaceTexture, which will receive frames from the // camera. We set the textured rect's program to render from it. mTexProgram = new Texture2dProgram(Texture2dProgram.ProgramType.TEXTURE_EXT); int textureId = mTexProgram.createTextureObject(); mCameraTexture = new SurfaceTexture(textureId); mRect.setTexture(textureId); if (!newSurface) { // This Surface was established on a previous run, so no surfaceChanged() // message is forthcoming. Finish the surface setup now. // // We could also just call this unconditionally, and perhaps do an unnecessary // bit of reallocating if a surface-changed message arrives. mWindowSurfaceWidth = mWindowSurface.getWidth(); mWindowSurfaceHeight = mWindowSurface.getHeight(); finishSurfaceSetup(); } mCameraTexture.setOnFrameAvailableListener(this); } /** * Releases most of the GL resources we currently hold (anything allocated by * surfaceAvailable()). * <p> * Does not release EglCore. */ private void releaseGl() { GlUtil.checkGlError("releaseGl start"); if (mWindowSurface != null) { mWindowSurface.release(); mWindowSurface = null; } if (mTexProgram != null) { mTexProgram.release(); mTexProgram = null; } GlUtil.checkGlError("releaseGl done"); mEglCore.makeNothingCurrent(); } /** * Handles the surfaceChanged message. * <p> * We always receive surfaceChanged() after surfaceCreated(), but surfaceAvailable() * could also be called with a Surface created on a previous run. So this may not * be called. */ private void surfaceChanged(int width, int height) { Log.d(TAG, "RenderThread surfaceChanged " + width + "x" + height); mWindowSurfaceWidth = width; mWindowSurfaceHeight = height; finishSurfaceSetup(); } /** * Handles the surfaceDestroyed message. 
*/ private void surfaceDestroyed() { // In practice this never appears to be called -- the activity is always paused // before the surface is destroyed. In theory it could be called though. Log.d(TAG, "RenderThread surfaceDestroyed"); releaseGl(); } /** * Sets up anything that depends on the window size. * <p> * Open the camera (to set mCameraAspectRatio) before calling here. */ private void finishSurfaceSetup() { int width = mWindowSurfaceWidth; int height = mWindowSurfaceHeight; Log.d(TAG, "finishSurfaceSetup size=" + width + "x" + height + " camera=" + mCameraPreviewWidth + "x" + mCameraPreviewHeight); // Use full window. GLES20.glViewport(0, 0, width, height); // Simple orthographic projection, with (0,0) in lower-left corner. Matrix.orthoM(mDisplayProjectionMatrix, 0, 0, width, 0, height, -1, 1); // Default position is center of screen. mPosX = width / 2.0f; mPosY = height / 2.0f; updateGeometry(); // Ready to go, start the camera. Log.d(TAG, "starting camera preview"); try { mCamera.setPreviewTexture(mCameraTexture); } catch (IOException ioe) { throw new RuntimeException(ioe); } mCamera.startPreview(); } /** * Updates the geometry of mRect, based on the size of the window and the current * values set by the UI. */ private void updateGeometry() { int width = mWindowSurfaceWidth; int height = mWindowSurfaceHeight; int smallDim = Math.min(width, height); // Max scale is a bit larger than the screen, so we can show over-size. 
float scaled = smallDim * (mSizePercent / 100.0f) * 1.25f; float cameraAspect = (float) mCameraPreviewWidth / mCameraPreviewHeight; int newWidth = Math.round(scaled * cameraAspect); int newHeight = Math.round(scaled); float zoomFactor = 1.0f - (mZoomPercent / 100.0f); int rotAngle = Math.round(360 * (mRotatePercent / 100.0f)); mRect.setScale(newWidth, newHeight); mRect.setPosition(mPosX, mPosY); mRect.setRotation(rotAngle); mRectDrawable.setScale(zoomFactor); mMainHandler.sendRectSize(newWidth, newHeight); mMainHandler.sendZoomArea(Math.round(mCameraPreviewWidth * zoomFactor), Math.round(mCameraPreviewHeight * zoomFactor)); mMainHandler.sendRotateDeg(rotAngle); } @Override // SurfaceTexture.OnFrameAvailableListener; runs on arbitrary thread public void onFrameAvailable(SurfaceTexture surfaceTexture) { mHandler.sendFrameAvailable(); } /** * Handles incoming frame of data from the camera. */ private void frameAvailable() { mCameraTexture.updateTexImage(); draw(); } /** * Draws the scene and submits the buffer. */ private void draw() { GlUtil.checkGlError("draw start"); GLES20.glClearColor(0.0f, 0.0f, 0.0f, 1.0f); GLES20.glClear(GLES20.GL_COLOR_BUFFER_BIT); mRect.draw(mTexProgram, mDisplayProjectionMatrix); mWindowSurface.swapBuffers(); GlUtil.checkGlError("draw done"); } private void setZoom(int percent) { mZoomPercent = percent; updateGeometry(); } private void setSize(int percent) { mSizePercent = percent; updateGeometry(); } private void setRotate(int percent) { mRotatePercent = percent; updateGeometry(); } private void setPosition(int x, int y) { mPosX = x; mPosY = mWindowSurfaceHeight - y; // GLES is upside-down updateGeometry(); } /** * Opens a camera, and attempts to establish preview mode at the specified width * and height with a fixed frame rate. * <p> * Sets mCameraPreviewWidth / mCameraPreviewHeight. 
*/ private void openCamera(int desiredWidth, int desiredHeight, int desiredFps) { if (mCamera != null) { throw new RuntimeException("camera already initialized"); } Camera.CameraInfo info = new Camera.CameraInfo(); // Try to find a front-facing camera (e.g. for videoconferencing). int numCameras = Camera.getNumberOfCameras(); for (int i = 0; i < numCameras; i++) { Camera.getCameraInfo(i, info); if (info.facing == Camera.CameraInfo.CAMERA_FACING_FRONT) { mCamera = Camera.open(i); break; } } if (mCamera == null) { Log.d(TAG, "No front-facing camera found; opening default"); mCamera = Camera.open(); // opens first back-facing camera } if (mCamera == null) { throw new RuntimeException("Unable to open camera"); } Camera.Parameters parms = mCamera.getParameters(); CameraUtils.choosePreviewSize(parms, desiredWidth, desiredHeight); // Try to set the frame rate to a constant value. int thousandFps = CameraUtils.chooseFixedPreviewFps(parms, desiredFps * 1000); // Give the camera a hint that we're recording video. This can have a big // impact on frame rate. parms.setRecordingHint(true); mCamera.setParameters(parms); int[] fpsRange = new int[2]; Camera.Size mCameraPreviewSize = parms.getPreviewSize(); parms.getPreviewFpsRange(fpsRange); String previewFacts = mCameraPreviewSize.width + "x" + mCameraPreviewSize.height; if (fpsRange[0] == fpsRange[1]) { previewFacts += " @" + (fpsRange[0] / 1000.0) + "fps"; } else { previewFacts += " @[" + (fpsRange[0] / 1000.0) + " - " + (fpsRange[1] / 1000.0) + "] fps"; } Log.i(TAG, "Camera config: " + previewFacts); mCameraPreviewWidth = mCameraPreviewSize.width; mCameraPreviewHeight = mCameraPreviewSize.height; mMainHandler.sendCameraParams(mCameraPreviewWidth, mCameraPreviewHeight, thousandFps / 1000.0f); } /** * Stops camera preview, and releases the camera to the system. 
*/ private void releaseCamera() { if (mCamera != null) { mCamera.stopPreview(); mCamera.release(); mCamera = null; Log.d(TAG, "releaseCamera -- done"); } } } /** * Handler for RenderThread. Used for messages sent from the UI thread to the render thread. * <p> * The object is created on the render thread, and the various "send" methods are called * from the UI thread. */ private static class RenderHandler extends Handler { private static final int MSG_SURFACE_AVAILABLE = 0; private static final int MSG_SURFACE_CHANGED = 1; private static final int MSG_SURFACE_DESTROYED = 2; private static final int MSG_SHUTDOWN = 3; private static final int MSG_FRAME_AVAILABLE = 4; private static final int MSG_ZOOM_VALUE = 5; private static final int MSG_SIZE_VALUE = 6; private static final int MSG_ROTATE_VALUE = 7; private static final int MSG_POSITION = 8; private static final int MSG_REDRAW = 9; // This shouldn't need to be a weak ref, since we'll go away when the Looper quits, // but no real harm in it. private WeakReference<RenderThread> mWeakRenderThread; /** * Call from render thread. */ public RenderHandler(RenderThread rt) { mWeakRenderThread = new WeakReference<RenderThread>(rt); } /** * Sends the "surface available" message. If the surface was newly created (i.e. * this is called from surfaceCreated()), set newSurface to true. If this is * being called during Activity startup for a previously-existing surface, set * newSurface to false. * <p> * The flag tells the caller whether or not it can expect a surfaceChanged() to * arrive very soon. * <p> * Call from UI thread. */ public void sendSurfaceAvailable(SurfaceHolder holder, boolean newSurface) { sendMessage(obtainMessage(MSG_SURFACE_AVAILABLE, newSurface ? 1 : 0, 0, holder)); } /** * Sends the "surface changed" message, forwarding what we got from the SurfaceHolder. * <p> * Call from UI thread. 
*/ public void sendSurfaceChanged(@SuppressWarnings("unused") int format, int width, int height) { // ignore format sendMessage(obtainMessage(MSG_SURFACE_CHANGED, width, height)); } /** * Sends the "shutdown" message, which tells the render thread to halt. * <p> * Call from UI thread. */ public void sendSurfaceDestroyed() { sendMessage(obtainMessage(MSG_SURFACE_DESTROYED)); } /** * Sends the "shutdown" message, which tells the render thread to halt. * <p> * Call from UI thread. */ public void sendShutdown() { sendMessage(obtainMessage(MSG_SHUTDOWN)); } /** * Sends the "frame available" message. * <p> * Call from UI thread. */ public void sendFrameAvailable() { sendMessage(obtainMessage(MSG_FRAME_AVAILABLE)); } /** * Sends the "zoom value" message. "progress" should be 0-100. * <p> * Call from UI thread. */ public void sendZoomValue(int progress) { sendMessage(obtainMessage(MSG_ZOOM_VALUE, progress, 0)); } /** * Sends the "size value" message. "progress" should be 0-100. * <p> * Call from UI thread. */ public void sendSizeValue(int progress) { sendMessage(obtainMessage(MSG_SIZE_VALUE, progress, 0)); } /** * Sends the "rotate value" message. "progress" should be 0-100. * <p> * Call from UI thread. */ public void sendRotateValue(int progress) { sendMessage(obtainMessage(MSG_ROTATE_VALUE, progress, 0)); } /** * Sends the "position" message. Sets the position of the rect. * <p> * Call from UI thread. */ public void sendPosition(int x, int y) { sendMessage(obtainMessage(MSG_POSITION, x, y)); } /** * Sends the "redraw" message. Forces an immediate redraw. * <p> * Call from UI thread. 
*/ public void sendRedraw() { sendMessage(obtainMessage(MSG_REDRAW)); } @Override // runs on RenderThread public void handleMessage(Message msg) { int what = msg.what; //Log.d(TAG, "RenderHandler [" + this + "]: what=" + what); RenderThread renderThread = mWeakRenderThread.get(); if (renderThread == null) { Log.w(TAG, "RenderHandler.handleMessage: weak ref is null"); return; } switch (what) { case MSG_SURFACE_AVAILABLE: renderThread.surfaceAvailable((SurfaceHolder) msg.obj, msg.arg1 != 0); break; case MSG_SURFACE_CHANGED: renderThread.surfaceChanged(msg.arg1, msg.arg2); break; case MSG_SURFACE_DESTROYED: renderThread.surfaceDestroyed(); break; case MSG_SHUTDOWN: renderThread.shutdown(); break; case MSG_FRAME_AVAILABLE: renderThread.frameAvailable(); break; case MSG_ZOOM_VALUE: renderThread.setZoom(msg.arg1); break; case MSG_SIZE_VALUE: renderThread.setSize(msg.arg1); break; case MSG_ROTATE_VALUE: renderThread.setRotate(msg.arg1); break; case MSG_POSITION: renderThread.setPosition(msg.arg1, msg.arg2); break; case MSG_REDRAW: renderThread.draw(); break; default: throw new RuntimeException("unknown message " + what); } } } }
googleapis/google-cloud-java
35,969
java-talent/proto-google-cloud-talent-v4beta1/src/main/java/com/google/cloud/talent/v4beta1/UpdateJobRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/talent/v4beta1/job_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.talent.v4beta1; /** * * * <pre> * Update job request. * </pre> * * Protobuf type {@code google.cloud.talent.v4beta1.UpdateJobRequest} */ public final class UpdateJobRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.talent.v4beta1.UpdateJobRequest) UpdateJobRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateJobRequest.newBuilder() to construct. 
private UpdateJobRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateJobRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateJobRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.talent.v4beta1.JobServiceProto .internal_static_google_cloud_talent_v4beta1_UpdateJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.talent.v4beta1.JobServiceProto .internal_static_google_cloud_talent_v4beta1_UpdateJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.talent.v4beta1.UpdateJobRequest.class, com.google.cloud.talent.v4beta1.UpdateJobRequest.Builder.class); } private int bitField0_; public static final int JOB_FIELD_NUMBER = 1; private com.google.cloud.talent.v4beta1.Job job_; /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4beta1.Job job = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the job field is set. */ @java.lang.Override public boolean hasJob() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4beta1.Job job = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The job. */ @java.lang.Override public com.google.cloud.talent.v4beta1.Job getJob() { return job_ == null ? com.google.cloud.talent.v4beta1.Job.getDefaultInstance() : job_; } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4beta1.Job job = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.talent.v4beta1.JobOrBuilder getJobOrBuilder() { return job_ == null ? 
com.google.cloud.talent.v4beta1.Job.getDefaultInstance() : job_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] * is provided, only the specified fields in * [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. * Otherwise all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] * is provided, only the specified fields in * [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. * Otherwise all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] * is provided, only the specified fields in * [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. * Otherwise all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getJob()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getJob()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.talent.v4beta1.UpdateJobRequest)) { return super.equals(obj); } com.google.cloud.talent.v4beta1.UpdateJobRequest other = (com.google.cloud.talent.v4beta1.UpdateJobRequest) obj; if (hasJob() != other.hasJob()) return false; if (hasJob()) { if (!getJob().equals(other.getJob())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int 
hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasJob()) { hash = (37 * hash) + JOB_FIELD_NUMBER; hash = (53 * hash) + getJob().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.talent.v4beta1.UpdateJobRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4beta1.UpdateJobRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4beta1.UpdateJobRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4beta1.UpdateJobRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4beta1.UpdateJobRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.talent.v4beta1.UpdateJobRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.talent.v4beta1.UpdateJobRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.talent.v4beta1.UpdateJobRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.talent.v4beta1.UpdateJobRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.talent.v4beta1.UpdateJobRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.talent.v4beta1.UpdateJobRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.talent.v4beta1.UpdateJobRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.talent.v4beta1.UpdateJobRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Update job request. * </pre> * * Protobuf type {@code google.cloud.talent.v4beta1.UpdateJobRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.talent.v4beta1.UpdateJobRequest) com.google.cloud.talent.v4beta1.UpdateJobRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.talent.v4beta1.JobServiceProto .internal_static_google_cloud_talent_v4beta1_UpdateJobRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.talent.v4beta1.JobServiceProto .internal_static_google_cloud_talent_v4beta1_UpdateJobRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.talent.v4beta1.UpdateJobRequest.class, com.google.cloud.talent.v4beta1.UpdateJobRequest.Builder.class); } // Construct using com.google.cloud.talent.v4beta1.UpdateJobRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getJobFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; job_ = null; if (jobBuilder_ != null) { jobBuilder_.dispose(); jobBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public 
com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.talent.v4beta1.JobServiceProto .internal_static_google_cloud_talent_v4beta1_UpdateJobRequest_descriptor; } @java.lang.Override public com.google.cloud.talent.v4beta1.UpdateJobRequest getDefaultInstanceForType() { return com.google.cloud.talent.v4beta1.UpdateJobRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.talent.v4beta1.UpdateJobRequest build() { com.google.cloud.talent.v4beta1.UpdateJobRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.talent.v4beta1.UpdateJobRequest buildPartial() { com.google.cloud.talent.v4beta1.UpdateJobRequest result = new com.google.cloud.talent.v4beta1.UpdateJobRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.talent.v4beta1.UpdateJobRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.job_ = jobBuilder_ == null ? job_ : jobBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.talent.v4beta1.UpdateJobRequest) { return mergeFrom((com.google.cloud.talent.v4beta1.UpdateJobRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.talent.v4beta1.UpdateJobRequest other) { if (other == com.google.cloud.talent.v4beta1.UpdateJobRequest.getDefaultInstance()) return this; if (other.hasJob()) { mergeJob(other.getJob()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { 
boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getJobFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.talent.v4beta1.Job job_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.talent.v4beta1.Job, com.google.cloud.talent.v4beta1.Job.Builder, com.google.cloud.talent.v4beta1.JobOrBuilder> jobBuilder_; /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4beta1.Job job = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the job field is set. */ public boolean hasJob() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4beta1.Job job = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The job. */ public com.google.cloud.talent.v4beta1.Job getJob() { if (jobBuilder_ == null) { return job_ == null ? com.google.cloud.talent.v4beta1.Job.getDefaultInstance() : job_; } else { return jobBuilder_.getMessage(); } } /** * * * <pre> * Required. The Job to be updated. 
* </pre> * * <code>.google.cloud.talent.v4beta1.Job job = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setJob(com.google.cloud.talent.v4beta1.Job value) { if (jobBuilder_ == null) { if (value == null) { throw new NullPointerException(); } job_ = value; } else { jobBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4beta1.Job job = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setJob(com.google.cloud.talent.v4beta1.Job.Builder builderForValue) { if (jobBuilder_ == null) { job_ = builderForValue.build(); } else { jobBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4beta1.Job job = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeJob(com.google.cloud.talent.v4beta1.Job value) { if (jobBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && job_ != null && job_ != com.google.cloud.talent.v4beta1.Job.getDefaultInstance()) { getJobBuilder().mergeFrom(value); } else { job_ = value; } } else { jobBuilder_.mergeFrom(value); } if (job_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4beta1.Job job = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearJob() { bitField0_ = (bitField0_ & ~0x00000001); job_ = null; if (jobBuilder_ != null) { jobBuilder_.dispose(); jobBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The Job to be updated. 
* </pre> * * <code>.google.cloud.talent.v4beta1.Job job = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.talent.v4beta1.Job.Builder getJobBuilder() { bitField0_ |= 0x00000001; onChanged(); return getJobFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4beta1.Job job = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.talent.v4beta1.JobOrBuilder getJobOrBuilder() { if (jobBuilder_ != null) { return jobBuilder_.getMessageOrBuilder(); } else { return job_ == null ? com.google.cloud.talent.v4beta1.Job.getDefaultInstance() : job_; } } /** * * * <pre> * Required. The Job to be updated. * </pre> * * <code>.google.cloud.talent.v4beta1.Job job = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.talent.v4beta1.Job, com.google.cloud.talent.v4beta1.Job.Builder, com.google.cloud.talent.v4beta1.JobOrBuilder> getJobFieldBuilder() { if (jobBuilder_ == null) { jobBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.talent.v4beta1.Job, com.google.cloud.talent.v4beta1.Job.Builder, com.google.cloud.talent.v4beta1.JobOrBuilder>( getJob(), getParentForChildren(), isClean()); job_ = null; } return jobBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] * is provided, only the specified fields in * [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. * Otherwise all the fields are updated. * * A field mask to restrict the fields that are updated. 
Only * top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] * is provided, only the specified fields in * [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. * Otherwise all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] * is provided, only the specified fields in * [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. * Otherwise all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Strongly recommended for the best service experience. 
* * If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] * is provided, only the specified fields in * [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. * Otherwise all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] * is provided, only the specified fields in * [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. * Otherwise all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] * is provided, only the specified fields in * [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. * Otherwise all the fields are updated. 
* * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] * is provided, only the specified fields in * [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. * Otherwise all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Strongly recommended for the best service experience. * * If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] * is provided, only the specified fields in * [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. * Otherwise all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Strongly recommended for the best service experience. 
* * If [update_mask][google.cloud.talent.v4beta1.UpdateJobRequest.update_mask] * is provided, only the specified fields in * [job][google.cloud.talent.v4beta1.UpdateJobRequest.job] are updated. * Otherwise all the fields are updated. * * A field mask to restrict the fields that are updated. Only * top level fields of [Job][google.cloud.talent.v4beta1.Job] are supported. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.talent.v4beta1.UpdateJobRequest) } // @@protoc_insertion_point(class_scope:google.cloud.talent.v4beta1.UpdateJobRequest) private static final com.google.cloud.talent.v4beta1.UpdateJobRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.talent.v4beta1.UpdateJobRequest(); } public static com.google.cloud.talent.v4beta1.UpdateJobRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateJobRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateJobRequest>() { @java.lang.Override public UpdateJobRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateJobRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateJobRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.talent.v4beta1.UpdateJobRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/flink
36,051
flink-tests/src/test/java/org/apache/flink/api/datastream/DataStreamBatchExecutionITCase.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.flink.api.datastream; import org.apache.flink.api.common.RuntimeExecutionMode; import org.apache.flink.api.common.eventtime.WatermarkStrategy; import org.apache.flink.api.common.functions.RichMapFunction; import org.apache.flink.api.common.state.BroadcastState; import org.apache.flink.api.common.state.ListState; import org.apache.flink.api.common.state.ListStateDescriptor; import org.apache.flink.api.common.state.MapStateDescriptor; import org.apache.flink.api.common.state.ReadOnlyBroadcastState; import org.apache.flink.api.common.state.ValueStateDescriptor; import org.apache.flink.api.common.typeinfo.BasicTypeInfo; import org.apache.flink.api.common.typeutils.base.StringSerializer; import org.apache.flink.api.java.tuple.Tuple2; import org.apache.flink.runtime.testutils.MiniClusterResourceConfiguration; import org.apache.flink.streaming.api.datastream.BroadcastStream; import org.apache.flink.streaming.api.datastream.DataStream; import org.apache.flink.streaming.api.datastream.DataStreamSource; import org.apache.flink.streaming.api.datastream.KeyedStream; import org.apache.flink.streaming.api.datastream.MultipleConnectedStreams; import 
org.apache.flink.streaming.api.datastream.SingleOutputStreamOperator; import org.apache.flink.streaming.api.environment.StreamExecutionEnvironment; import org.apache.flink.streaming.api.functions.co.BroadcastProcessFunction; import org.apache.flink.streaming.api.functions.co.KeyedBroadcastProcessFunction; import org.apache.flink.streaming.api.operators.AbstractInput; import org.apache.flink.streaming.api.operators.AbstractStreamOperator; import org.apache.flink.streaming.api.operators.AbstractStreamOperatorFactory; import org.apache.flink.streaming.api.operators.AbstractStreamOperatorV2; import org.apache.flink.streaming.api.operators.Input; import org.apache.flink.streaming.api.operators.MultipleInputStreamOperator; import org.apache.flink.streaming.api.operators.StreamOperator; import org.apache.flink.streaming.api.operators.StreamOperatorParameters; import org.apache.flink.streaming.api.operators.TwoInputStreamOperator; import org.apache.flink.streaming.api.transformations.KeyedMultipleInputTransformation; import org.apache.flink.streaming.api.transformations.TwoInputTransformation; import org.apache.flink.streaming.runtime.streamrecord.StreamRecord; import org.apache.flink.streaming.util.RestartStrategyUtils; import org.apache.flink.test.util.MiniClusterWithClientResource; import org.apache.flink.util.CloseableIterator; import org.apache.flink.util.CollectionUtil; import org.apache.flink.util.Collector; import org.junit.ClassRule; import org.junit.Test; import java.util.Arrays; import java.util.List; import static org.apache.flink.util.CollectionUtil.iteratorToList; import static org.hamcrest.CoreMatchers.equalTo; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.junit.Assert.assertThat; /** * Integration test for {@link RuntimeExecutionMode#BATCH} execution on the DataStream API. 
* * <p>We use a {@link MiniClusterWithClientResource} with a single TaskManager with 1 slot to verify * that programs in BATCH execution mode can be executed in stages. */ public class DataStreamBatchExecutionITCase { private static final int DEFAULT_PARALLELISM = 1; @ClassRule public static MiniClusterWithClientResource miniClusterResource = new MiniClusterWithClientResource( new MiniClusterResourceConfiguration.Builder() .setNumberTaskManagers(1) .setNumberSlotsPerTaskManager(DEFAULT_PARALLELISM) .build()); /** * We induce a failure in the last mapper. In BATCH execution mode the part of the pipeline * before the key-by should not be re-executed. Only the part after that will restart. We check * that by suffixing the attempt number to records and asserting the correct number. */ @Test public void batchFailoverWithKeyByBarrier() throws Exception { final StreamExecutionEnvironment env = getExecutionEnvironment(); DataStreamSource<String> source = env.fromData("foo", "bar"); SingleOutputStreamOperator<String> mapped = source.map(new SuffixAttemptId("a")) .map(new SuffixAttemptId("b")) .keyBy(in -> in) .map(new SuffixAttemptId("c")) .map(new OnceFailingMapper("d")); try (CloseableIterator<String> result = mapped.executeAndCollect()) { // only the operators after the key-by "barrier" are restarted and will have the // "attempt 1" suffix assertThat( iteratorToList(result), containsInAnyOrder("foo-a0-b0-c1-d1", "bar-a0-b0-c1-d1")); } } /** * We induce a failure in the last mapper. In BATCH execution mode the part of the pipeline * before the rebalance should not be re-executed. Only the part after that will restart. We * check that by suffixing the attempt number to records and asserting the correct number. 
*/ @Test public void batchFailoverWithRebalanceBarrier() throws Exception { final StreamExecutionEnvironment env = getExecutionEnvironment(); DataStreamSource<String> source = env.fromData("foo", "bar"); SingleOutputStreamOperator<String> mapped = source.map(new SuffixAttemptId("a")) .map(new SuffixAttemptId("b")) .rebalance() .map(new SuffixAttemptId("c")) .map(new OnceFailingMapper("d")); try (CloseableIterator<String> result = mapped.executeAndCollect()) { // only the operators after the rebalance "barrier" are restarted and will have the // "attempt 1" suffix assertThat( iteratorToList(result), containsInAnyOrder("foo-a0-b0-c1-d1", "bar-a0-b0-c1-d1")); } } /** * We induce a failure in the last mapper. In BATCH execution mode the part of the pipeline * before the rescale should not be re-executed. Only the part after that will restart. We check * that by suffixing the attempt number to records and asserting the correct number. */ @Test public void batchFailoverWithRescaleBarrier() throws Exception { final StreamExecutionEnvironment env = getExecutionEnvironment(); DataStreamSource<String> source = env.fromData("foo", "bar"); env.setParallelism(1); SingleOutputStreamOperator<String> mapped = source.map(new SuffixAttemptId("a")) .map(new SuffixAttemptId("b")) .rescale() .map(new SuffixAttemptId("c")) .setParallelism(2) .map(new OnceFailingMapper("d")) .setParallelism(2); try (CloseableIterator<String> result = mapped.executeAndCollect()) { // only the operators after the rescale "barrier" are restarted and will have the // "attempt 1" suffix assertThat( iteratorToList(result), containsInAnyOrder("foo-a0-b0-c1-d1", "bar-a0-b0-c1-d1")); } } @Test public void batchReduceSingleResultPerKey() throws Exception { StreamExecutionEnvironment env = getExecutionEnvironment(); DataStreamSource<Long> numbers = env.fromSequence(0, 10); // send all records into a single reducer KeyedStream<Long, Long> stream = numbers.keyBy(i -> i % 2); DataStream<Long> sums = 
stream.reduce(Long::sum); try (CloseableIterator<Long> sumsIterator = sums.executeAndCollect()) { List<Long> results = CollectionUtil.iteratorToList(sumsIterator); assertThat(results, equalTo(Arrays.asList(30L, 25L))); } } @Test public void batchSumSingleResultPerKey() throws Exception { StreamExecutionEnvironment env = getExecutionEnvironment(); DataStreamSource<Long> numbers = env.fromSequence(0, 10); // send all records into a single reducer KeyedStream<Long, Long> stream = numbers.keyBy(i -> i % 2); DataStream<Long> sums = stream.sum(0); try (CloseableIterator<Long> sumsIterator = sums.executeAndCollect()) { List<Long> results = CollectionUtil.iteratorToList(sumsIterator); assertThat(results, equalTo(Arrays.asList(30L, 25L))); } } /** * Verifies that all regular input is processed before keyed input. * * <p>Here, the first input is keyed while the second input is not keyed. */ @Test public void batchKeyedNonKeyedTwoInputOperator() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(1); env.setRuntimeMode(RuntimeExecutionMode.BATCH); DataStream<Tuple2<String, Integer>> keyedInput = env.fromData( Tuple2.of("regular2", 4), Tuple2.of("regular1", 3), Tuple2.of("regular1", 2), Tuple2.of("regular2", 1)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)); DataStream<Tuple2<String, Integer>> regularInput = env.fromData( Tuple2.of("regular4", 4), Tuple2.of("regular3", 3), Tuple2.of("regular3", 2), Tuple2.of("regular4", 1)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)); DataStream<String> result = keyedInput .keyBy(in -> in.f0) .connect(regularInput) .transform( "operator", BasicTypeInfo.STRING_TYPE_INFO, new TwoInputIdentityOperator()); try (CloseableIterator<String> resultIterator = result.executeAndCollect()) 
{ List<String> results = CollectionUtil.iteratorToList(resultIterator); assertThat( results, equalTo( Arrays.asList( "(regular4,4)", "(regular3,3)", "(regular3,2)", "(regular4,1)", "(regular1,2)", "(regular1,3)", "(regular2,1)", "(regular2,4)"))); } } /** * Verifies that all regular input is processed before keyed input. * * <p>Here, the first input is not keyed while the second input is keyed. */ @Test public void batchNonKeyedKeyedTwoInputOperator() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(1); env.setRuntimeMode(RuntimeExecutionMode.BATCH); DataStream<Tuple2<String, Integer>> keyedInput = env.fromData( Tuple2.of("regular2", 4), Tuple2.of("regular1", 3), Tuple2.of("regular1", 2), Tuple2.of("regular2", 1)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)); DataStream<Tuple2<String, Integer>> regularInput = env.fromData( Tuple2.of("regular4", 4), Tuple2.of("regular3", 3), Tuple2.of("regular3", 2), Tuple2.of("regular4", 1)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)); DataStream<String> result = regularInput .connect(keyedInput.keyBy(in -> in.f0)) .transform( "operator", BasicTypeInfo.STRING_TYPE_INFO, new TwoInputIdentityOperator()); try (CloseableIterator<String> resultIterator = result.executeAndCollect()) { List<String> results = CollectionUtil.iteratorToList(resultIterator); assertThat( results, equalTo( Arrays.asList( "(regular4,4)", "(regular3,3)", "(regular3,2)", "(regular4,1)", "(regular1,2)", "(regular1,3)", "(regular2,1)", "(regular2,4)"))); } } /** Verifies that all broadcast input is processed before keyed input. 
*/ @Test public void batchKeyedBroadcastExecution() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(1); env.setRuntimeMode(RuntimeExecutionMode.BATCH); DataStream<Tuple2<String, Integer>> bcInput = env.fromData(Tuple2.of("bc1", 1), Tuple2.of("bc2", 2), Tuple2.of("bc3", 3)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)); DataStream<Tuple2<String, Integer>> regularInput = env.fromData( Tuple2.of("regular1", 1), Tuple2.of("regular1", 2), Tuple2.of("regular2", 2), Tuple2.of("regular1", 3), Tuple2.of("regular1", 4), Tuple2.of("regular1", 3), Tuple2.of("regular2", 5), Tuple2.of("regular1", 5), Tuple2.of("regular2", 3), Tuple2.of("regular1", 3)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)); BroadcastStream<Tuple2<String, Integer>> broadcastStream = bcInput.broadcast(STATE_DESCRIPTOR); DataStream<String> result = regularInput .keyBy((input) -> input.f0) .connect(broadcastStream) .process(new TestKeyedBroadcastFunction()); try (CloseableIterator<String> resultIterator = result.executeAndCollect()) { List<String> results = CollectionUtil.iteratorToList(resultIterator); assertThat( results, equalTo( Arrays.asList( "(regular1,1): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular1,2): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular1,3): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular1,3): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular1,3): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular1,4): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular1,5): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular2,2): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular2,3): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular2,5): [bc2=bc2, bc1=bc1, bc3=bc3]"))); } } /** Verifies that all broadcast input is processed before regular input. 
*/ @Test public void batchBroadcastExecution() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(1); env.setRuntimeMode(RuntimeExecutionMode.BATCH); DataStream<Tuple2<String, Integer>> bcInput = env.fromData(Tuple2.of("bc1", 1), Tuple2.of("bc2", 2), Tuple2.of("bc3", 3)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)); DataStream<Tuple2<String, Integer>> regularInput = env.fromData( Tuple2.of("regular1", 1), Tuple2.of("regular1", 2), Tuple2.of("regular1", 3), Tuple2.of("regular1", 4), Tuple2.of("regular1", 3), Tuple2.of("regular1", 5), Tuple2.of("regular1", 3)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)); BroadcastStream<Tuple2<String, Integer>> broadcastStream = bcInput.broadcast(STATE_DESCRIPTOR); DataStream<String> result = regularInput.connect(broadcastStream).process(new TestBroadcastFunction()); try (CloseableIterator<String> resultIterator = result.executeAndCollect()) { List<String> results = CollectionUtil.iteratorToList(resultIterator); // regular, that is non-keyed input is not sorted by timestamp. For keyed inputs // this is a by-product of the grouping/sorting we use to get the keyed groups. 
assertThat( results, equalTo( Arrays.asList( "(regular1,1): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular1,2): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular1,3): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular1,4): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular1,3): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular1,5): [bc2=bc2, bc1=bc1, bc3=bc3]", "(regular1,3): [bc2=bc2, bc1=bc1, bc3=bc3]"))); } } @Test public void batchMixedKeyedAndNonKeyedTwoInputOperator() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setParallelism(1); env.setRuntimeMode(RuntimeExecutionMode.BATCH); DataStream<Tuple2<String, Integer>> bcInput = env.fromData(Tuple2.of("bc3", 3), Tuple2.of("bc2", 2), Tuple2.of("bc1", 1)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)) .broadcast(); DataStream<Tuple2<String, Integer>> regularInput = env.fromData( Tuple2.of("regular1", 1), Tuple2.of("regular1", 2), Tuple2.of("regular1", 3), Tuple2.of("regular1", 4), Tuple2.of("regular2", 3), Tuple2.of("regular2", 5), Tuple2.of("regular1", 3)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)) .keyBy(input -> input.f0); TwoInputTransformation<Tuple2<String, Integer>, Tuple2<String, Integer>, String> twoInputTransformation = new TwoInputTransformation<>( regularInput.getTransformation(), bcInput.getTransformation(), "operator", new TestMixedTwoInputOperator(), BasicTypeInfo.STRING_TYPE_INFO, 1); twoInputTransformation.setStateKeyType(BasicTypeInfo.STRING_TYPE_INFO); twoInputTransformation.setStateKeySelectors(input -> input.f0, null); DataStream<String> result = new DataStream<>(env, twoInputTransformation); try (CloseableIterator<String> resultIterator = result.executeAndCollect()) { List<String> results = CollectionUtil.iteratorToList(resultIterator); assertThat( results, equalTo( Arrays.asList( 
"(regular1,1): [bc3, bc2, bc1]", "(regular1,2): [bc3, bc2, bc1]", "(regular1,3): [bc3, bc2, bc1]", "(regular1,3): [bc3, bc2, bc1]", "(regular1,4): [bc3, bc2, bc1]", "(regular2,3): [bc3, bc2, bc1]", "(regular2,5): [bc3, bc2, bc1]"))); } } @Test public void batchMixedKeyedAndNonKeyedMultiInputOperator() throws Exception { StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setRuntimeMode(RuntimeExecutionMode.BATCH); DataStream<Tuple2<String, Integer>> bc1Input = env.fromData(Tuple2.of("bc3", 3), Tuple2.of("bc2", 2)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)) .broadcast(); DataStream<Tuple2<String, Integer>> bc2Input = env.fromData(Tuple2.of("bc1", 1)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)) .broadcast(); DataStream<Tuple2<String, Integer>> regularInput = env.fromData( Tuple2.of("regular1", 1), Tuple2.of("regular1", 2), Tuple2.of("regular1", 3), Tuple2.of("regular1", 4), Tuple2.of("regular2", 3), Tuple2.of("regular2", 5), Tuple2.of("regular1", 3)) .assignTimestampsAndWatermarks( WatermarkStrategy.<Tuple2<String, Integer>>forMonotonousTimestamps() .withTimestampAssigner((in, ts) -> in.f1)) .keyBy(input -> input.f0); KeyedMultipleInputTransformation<String> multipleInputTransformation = new KeyedMultipleInputTransformation<>( "operator", mixedInputsOperatorFactory, BasicTypeInfo.STRING_TYPE_INFO, 1, BasicTypeInfo.STRING_TYPE_INFO); multipleInputTransformation.addInput( regularInput.getTransformation(), input -> ((Tuple2<String, Integer>) input).f0); multipleInputTransformation.addInput(bc1Input.getTransformation(), null); multipleInputTransformation.addInput(bc2Input.getTransformation(), null); DataStream<String> result = new MultipleConnectedStreams(env).transform(multipleInputTransformation); try (CloseableIterator<String> 
resultIterator = result.executeAndCollect()) { List<String> results = CollectionUtil.iteratorToList(resultIterator); assertThat( results, equalTo( Arrays.asList( "(regular1,1): [bc3, bc2, bc1]", "(regular1,2): [bc3, bc2, bc1]", "(regular1,3): [bc3, bc2, bc1]", "(regular1,3): [bc3, bc2, bc1]", "(regular1,4): [bc3, bc2, bc1]", "(regular2,3): [bc3, bc2, bc1]", "(regular2,5): [bc3, bc2, bc1]"))); } } private StreamExecutionEnvironment getExecutionEnvironment() { final StreamExecutionEnvironment env = StreamExecutionEnvironment.getExecutionEnvironment(); env.setRuntimeMode(RuntimeExecutionMode.BATCH); env.setParallelism(1); // trick the collecting sink into working even in the face of failures 🙏 env.enableCheckpointing(42); RestartStrategyUtils.configureFixedDelayRestartStrategy(env, 10, 1L); return env; } /** Adds the attempt number as a suffix. */ public static class SuffixAttemptId extends RichMapFunction<String, String> { private final String suffix; public SuffixAttemptId(String suffix) { this.suffix = suffix; } @Override public String map(String value) { return value + "-" + suffix + getRuntimeContext().getTaskInfo().getAttemptNumber(); } } /** * Adds the attempt number as a suffix. * * <p>Also fails by throwing an exception on the first attempt. 
*/ public static class OnceFailingMapper extends RichMapFunction<String, String> { private final String suffix; public OnceFailingMapper(String suffix) { this.suffix = suffix; } @Override public String map(String value) throws Exception { if (getRuntimeContext().getTaskInfo().getAttemptNumber() <= 0) { throw new RuntimeException("FAILING"); } return value + "-" + suffix + getRuntimeContext().getTaskInfo().getAttemptNumber(); } } static final MapStateDescriptor<String, String> STATE_DESCRIPTOR = new MapStateDescriptor<>( "bc-input", StringSerializer.INSTANCE, StringSerializer.INSTANCE); static final ValueStateDescriptor<String> KEYED_STATE_DESCRIPTOR = new ValueStateDescriptor<>("keyed-state", StringSerializer.INSTANCE); static final ListStateDescriptor<String> LIST_STATE_DESCRIPTOR = new ListStateDescriptor<>("bc-list-input", StringSerializer.INSTANCE); private static class TestKeyedBroadcastFunction extends KeyedBroadcastProcessFunction< String, Tuple2<String, Integer>, Tuple2<String, Integer>, String> { @Override public void processElement( Tuple2<String, Integer> value, ReadOnlyContext ctx, Collector<String> out) throws Exception { ReadOnlyBroadcastState<String, String> state = ctx.getBroadcastState(STATE_DESCRIPTOR); out.collect(value + ": " + state.immutableEntries().toString()); } @Override public void processBroadcastElement( Tuple2<String, Integer> value, Context ctx, Collector<String> out) throws Exception { BroadcastState<String, String> state = ctx.getBroadcastState(STATE_DESCRIPTOR); state.put(value.f0, value.f0); // iterating over keys is a no-op in BATCH execution mode ctx.applyToKeyedState( KEYED_STATE_DESCRIPTOR, (key, state1) -> { throw new RuntimeException("Shouldn't happen"); }); } } private static class TestBroadcastFunction extends BroadcastProcessFunction< Tuple2<String, Integer>, Tuple2<String, Integer>, String> { @Override public void processElement( Tuple2<String, Integer> value, ReadOnlyContext ctx, Collector<String> out) throws Exception 
{ ReadOnlyBroadcastState<String, String> state = ctx.getBroadcastState(STATE_DESCRIPTOR); out.collect(value + ": " + state.immutableEntries().toString()); } @Override public void processBroadcastElement( Tuple2<String, Integer> value, Context ctx, Collector<String> out) throws Exception { BroadcastState<String, String> state = ctx.getBroadcastState(STATE_DESCRIPTOR); state.put(value.f0, value.f0); } } private static class TwoInputIdentityOperator extends AbstractStreamOperator<String> implements TwoInputStreamOperator< Tuple2<String, Integer>, Tuple2<String, Integer>, String> { @Override public void processElement1(StreamRecord<Tuple2<String, Integer>> element) throws Exception { output.collect( new StreamRecord<>(element.getValue().toString(), element.getTimestamp())); } @Override public void processElement2(StreamRecord<Tuple2<String, Integer>> element) throws Exception { output.collect( new StreamRecord<>(element.getValue().toString(), element.getTimestamp())); } } private static final class TestMixedTwoInputOperator extends AbstractStreamOperator<String> implements TwoInputStreamOperator< Tuple2<String, Integer>, Tuple2<String, Integer>, String> { @Override public void processElement1(StreamRecord<Tuple2<String, Integer>> element) throws Exception { ListState<String> operatorState = getOperatorStateBackend().getListState(LIST_STATE_DESCRIPTOR); output.collect( new StreamRecord<>(element.getValue() + ": " + operatorState.get().toString())); } @Override public void processElement2(StreamRecord<Tuple2<String, Integer>> element) throws Exception { ListState<String> operatorState = getOperatorStateBackend().getListState(LIST_STATE_DESCRIPTOR); operatorState.add(element.getValue().f0); } } @SuppressWarnings({"unchecked", "rawtypes"}) private static final AbstractStreamOperatorFactory<String> mixedInputsOperatorFactory = new AbstractStreamOperatorFactory<String>() { @Override public <T extends StreamOperator<String>> T createStreamOperator( 
StreamOperatorParameters<String> parameters) { return (T) new TestMixedMultipleInputOperator(parameters); } @Override public Class<? extends StreamOperator> getStreamOperatorClass( ClassLoader classLoader) { return TestMixedMultipleInputOperator.class; } }; private static class TestMixedMultipleInputOperator extends AbstractStreamOperatorV2<String> implements MultipleInputStreamOperator<String> { public TestMixedMultipleInputOperator(StreamOperatorParameters<String> parameters) { super(parameters, 3); } @Override @SuppressWarnings({"rawtypes"}) public List<Input> getInputs() { return Arrays.asList( new AbstractInput<Tuple2<String, Integer>, String>(this, 1) { @Override public void processElement(StreamRecord<Tuple2<String, Integer>> element) throws Exception { ListState<String> operatorState = getOperatorStateBackend().getListState(LIST_STATE_DESCRIPTOR); output.collect( new StreamRecord<>( element.getValue() + ": " + operatorState.get().toString())); } }, new AbstractInput<Tuple2<String, Integer>, String>(this, 2) { @Override public void processElement(StreamRecord<Tuple2<String, Integer>> element) throws Exception { ListState<String> operatorState = getOperatorStateBackend().getListState(LIST_STATE_DESCRIPTOR); operatorState.add(element.getValue().f0); } }, new AbstractInput<Tuple2<String, Integer>, String>(this, 3) { @Override public void processElement(StreamRecord<Tuple2<String, Integer>> element) throws Exception { ListState<String> operatorState = getOperatorStateBackend().getListState(LIST_STATE_DESCRIPTOR); operatorState.add(element.getValue().f0); } }); } } }
apache/jackrabbit
35,761
jackrabbit-core/src/main/java/org/apache/jackrabbit/core/persistence/xml/XMLPersistenceManager.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.core.persistence.xml; import org.apache.commons.io.IOUtils; import org.apache.jackrabbit.core.id.NodeId; import org.apache.jackrabbit.core.id.PropertyId; import org.apache.jackrabbit.core.fs.BasedFileSystem; import org.apache.jackrabbit.core.fs.FileSystem; import org.apache.jackrabbit.core.fs.FileSystemException; import org.apache.jackrabbit.core.fs.FileSystemResource; import org.apache.jackrabbit.core.fs.local.LocalFileSystem; import org.apache.jackrabbit.core.persistence.AbstractPersistenceManager; import org.apache.jackrabbit.core.state.ItemStateException; import org.apache.jackrabbit.core.state.NoSuchItemStateException; import org.apache.jackrabbit.core.state.NodeReferences; import org.apache.jackrabbit.core.state.NodeState; import org.apache.jackrabbit.core.persistence.PMContext; import org.apache.jackrabbit.core.state.PropertyState; import org.apache.jackrabbit.core.state.ChildNodeEntry; import org.apache.jackrabbit.core.persistence.util.BLOBStore; import org.apache.jackrabbit.core.persistence.util.FileSystemBLOBStore; import org.apache.jackrabbit.core.persistence.util.ResourceBasedBLOBStore; import org.apache.jackrabbit.core.util.DOMWalker; import 
org.apache.jackrabbit.core.value.InternalValue; import org.apache.jackrabbit.spi.Name; import org.apache.jackrabbit.spi.NameFactory; import org.apache.jackrabbit.util.Text; import org.apache.jackrabbit.spi.commons.name.NameFactoryImpl; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.jcr.PropertyType; import java.io.BufferedWriter; import java.io.File; import java.io.IOException; import java.io.InputStream; import java.io.OutputStream; import java.io.OutputStreamWriter; import java.io.UnsupportedEncodingException; import java.io.Writer; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.security.MessageDigest; import java.security.NoSuchAlgorithmException; import java.util.ArrayList; import java.util.HashSet; import java.util.Set; /** * <code>XMLPersistenceManager</code> is a <code>FileSystem</code>-based * <code>PersistenceManager</code> that persists <code>ItemState</code> * and <code>NodeReferences</code> objects in XML format. 
* * @deprecated Please migrate to a bundle persistence manager * (<a href="https://issues.apache.org/jira/browse/JCR-2802">JCR-2802</a>) */ @Deprecated public class XMLPersistenceManager extends AbstractPersistenceManager { private static Logger log = LoggerFactory.getLogger(XMLPersistenceManager.class); /** * hexdigits for toString */ private static final char[] HEXDIGITS = "0123456789abcdef".toCharArray(); /** * The default encoding used in serialization */ public static final Charset DEFAULT_ENCODING = StandardCharsets.UTF_8; /** * The XML elements and attributes used in serialization */ private static final String NODE_ELEMENT = "node"; private static final String UUID_ATTRIBUTE = "uuid"; private static final String NODETYPE_ATTRIBUTE = "nodeType"; private static final String PARENTUUID_ATTRIBUTE = "parentUUID"; private static final String MODCOUNT_ATTRIBUTE = "modCount"; private static final String MIXINTYPES_ELEMENT = "mixinTypes"; private static final String MIXINTYPE_ELEMENT = "mixinType"; private static final String PROPERTIES_ELEMENT = "properties"; private static final String PROPERTY_ELEMENT = "property"; private static final String NAME_ATTRIBUTE = "name"; private static final String TYPE_ATTRIBUTE = "type"; private static final String MULTIVALUED_ATTRIBUTE = "multiValued"; private static final String VALUES_ELEMENT = "values"; private static final String VALUE_ELEMENT = "value"; private static final String NODES_ELEMENT = "nodes"; private static final String NODEREFERENCES_ELEMENT = "references"; private static final String TARGETID_ATTRIBUTE = "targetId"; private static final String NODEREFERENCE_ELEMENT = "reference"; private static final String PROPERTYID_ATTRIBUTE = "propertyId"; private static final String NODEFILENAME = ".node.xml"; private static final String NODEREFSFILENAME = ".references.xml"; private boolean initialized; // file system where the item state is stored private FileSystem itemStateFS; // file system where BLOB data is stored 
private FileSystem blobFS; // BLOBStore that manages BLOB data in the file system private BLOBStore blobStore; /** * Template for the subdirectory path for the files associated with * a single node. The template is processed by replacing each * "<code>x</code>" with the next hex digit in the UUID string. * All other characters in the template are used as-is. */ private String nodePathTemplate = "xxxx/xxxx/xxxxxxxxxxxxxxxxxxxxxxxx"; private final NameFactory factory; /** * Creates a new <code>XMLPersistenceManager</code> instance. */ public XMLPersistenceManager() { initialized = false; factory = NameFactoryImpl.getInstance(); } /** * Returns the node path template. * * @return node path template */ public String getNodePathTemplate() { return nodePathTemplate; } /** * Sets the node path template. * * @param template node path template */ public void setNodePathTemplate(String template) { nodePathTemplate = template; } /** * Builds the path of the node folder for the given node identifier * based on the configured node path template. 
* * @param id node identifier * @return node folder path */ private String buildNodeFolderPath(NodeId id) { StringBuilder sb = new StringBuilder(); char[] chars = id.toString().toCharArray(); int cnt = 0; for (int i = 0; i < nodePathTemplate.length(); i++) { char ch = nodePathTemplate.charAt(i); if (ch == 'x' && cnt < chars.length) { ch = chars[cnt++]; if (ch == '-') { ch = chars[cnt++]; } } sb.append(ch); } return sb.toString(); } private String buildPropFilePath(PropertyId id) { String fileName; try { MessageDigest md5 = MessageDigest.getInstance("MD5"); md5.update(id.getName().getNamespaceURI().getBytes()); md5.update(id.getName().getLocalName().getBytes()); byte[] bytes = md5.digest(); char[] chars = new char[32]; for (int i = 0, j = 0; i < 16; i++) { chars[j++] = HEXDIGITS[(bytes[i] >> 4) & 0x0f]; chars[j++] = HEXDIGITS[bytes[i] & 0x0f]; } fileName = new String(chars) + ".xml"; } catch (NoSuchAlgorithmException nsae) { // should never get here as MD5 should always be available in the JRE String msg = "MD5 not available"; log.error(msg, nsae); throw new InternalError(msg + nsae); } return buildNodeFolderPath(id.getParentId()) + "/" + fileName; } private String buildNodeFilePath(NodeId id) { return buildNodeFolderPath(id) + "/" + NODEFILENAME; } private String buildNodeReferencesFilePath(NodeId id) { return buildNodeFolderPath(id) + "/" + NODEREFSFILENAME; } private void readState(DOMWalker walker, NodeState state) throws ItemStateException { // first do some paranoid sanity checks if (!walker.getName().equals(NODE_ELEMENT)) { String msg = "invalid serialization format (unexpected element: " + walker.getName() + ")"; log.debug(msg); throw new ItemStateException(msg); } // check uuid if (!state.getNodeId().toString().equals(walker.getAttribute(UUID_ATTRIBUTE))) { String msg = "invalid serialized state: uuid mismatch"; log.debug(msg); throw new ItemStateException(msg); } // check nodetype String ntName = walker.getAttribute(NODETYPE_ATTRIBUTE); if 
(!factory.create(ntName).equals(state.getNodeTypeName())) { String msg = "invalid serialized state: nodetype mismatch"; log.debug(msg); throw new ItemStateException(msg); } // now we're ready to read state // primary parent String parentUUID = walker.getAttribute(PARENTUUID_ATTRIBUTE); if (parentUUID.length() > 0) { state.setParentId(NodeId.valueOf(parentUUID)); } // modification count String modCount = walker.getAttribute(MODCOUNT_ATTRIBUTE); state.setModCount(Short.parseShort(modCount)); // mixin types if (walker.enterElement(MIXINTYPES_ELEMENT)) { Set<Name> mixins = new HashSet<Name>(); while (walker.iterateElements(MIXINTYPE_ELEMENT)) { mixins.add(factory.create(walker.getAttribute(NAME_ATTRIBUTE))); } if (mixins.size() > 0) { state.setMixinTypeNames(mixins); } walker.leaveElement(); } // property entries if (walker.enterElement(PROPERTIES_ELEMENT)) { while (walker.iterateElements(PROPERTY_ELEMENT)) { String propName = walker.getAttribute(NAME_ATTRIBUTE); // @todo deserialize type and values state.addPropertyName(factory.create(propName)); } walker.leaveElement(); } // child node entries if (walker.enterElement(NODES_ELEMENT)) { while (walker.iterateElements(NODE_ELEMENT)) { String childName = walker.getAttribute(NAME_ATTRIBUTE); String childUUID = walker.getAttribute(UUID_ATTRIBUTE); state.addChildNodeEntry(factory.create(childName), NodeId.valueOf(childUUID)); } walker.leaveElement(); } } private void readState(DOMWalker walker, PropertyState state) throws ItemStateException { // first do some paranoid sanity checks if (!walker.getName().equals(PROPERTY_ELEMENT)) { String msg = "invalid serialization format (unexpected element: " + walker.getName() + ")"; log.debug(msg); throw new ItemStateException(msg); } // check name if (!state.getName().equals(factory.create(walker.getAttribute(NAME_ATTRIBUTE)))) { String msg = "invalid serialized state: name mismatch"; log.debug(msg); throw new ItemStateException(msg); } // check parentUUID NodeId parentId = 
NodeId.valueOf(walker.getAttribute(PARENTUUID_ATTRIBUTE)); // RHS of the parentId assignment begun on the preceding (unseen) line
        // sanity check: the serialized parent id must match the state we are populating
        if (!parentId.equals(state.getParentId())) {
            String msg = "invalid serialized state: parentUUID mismatch";
            log.debug(msg);
            throw new ItemStateException(msg);
        }

        // now we're ready to read state

        // type (stored as the JCR property-type name, e.g. "String", "Binary")
        String typeName = walker.getAttribute(TYPE_ATTRIBUTE);
        int type;
        try {
            type = PropertyType.valueFromName(typeName);
        } catch (IllegalArgumentException iae) {
            // should never be getting here
            throw new ItemStateException("unexpected property-type: " + typeName, iae);
        }
        state.setType(type);

        // multiValued flag
        String multiValued = walker.getAttribute(MULTIVALUED_ATTRIBUTE);
        state.setMultiValued(Boolean.parseBoolean(multiValued));

        // modification count
        String modCount = walker.getAttribute(MODCOUNT_ATTRIBUTE);
        state.setModCount(Short.parseShort(modCount));

        // values
        ArrayList<InternalValue> values = new ArrayList<InternalValue>();
        if (walker.enterElement(VALUES_ELEMENT)) {
            while (walker.iterateElements(VALUE_ELEMENT)) {
                // read serialized value
                String content = walker.getContent();
                if (PropertyType.STRING == type) {
                    // STRING value can be empty; ignore length
                    values.add(InternalValue.valueOf(content, type));
                } else if (content.length() > 0) {
                    // non-empty non-STRING value
                    if (type == PropertyType.BINARY) {
                        try {
                            // special handling required for binary value:
                            // the value stores the id of the BLOB data
                            // in the BLOB store
                            if (blobStore instanceof ResourceBasedBLOBStore) {
                                // optimization: if the BLOB store is resource-based
                                // retrieve the resource directly rather than having
                                // to read the BLOB from an input stream
                                FileSystemResource fsRes =
                                        ((ResourceBasedBLOBStore) blobStore).getResource(content);
                                values.add(InternalValue.create(fsRes));
                            } else {
                                InputStream in = blobStore.get(content);
                                try {
                                    values.add(InternalValue.create(in));
                                } finally {
                                    IOUtils.closeQuietly(in);
                                }
                            }
                        } catch (Exception e) {
                            String msg = "error while reading serialized binary value";
                            log.debug(msg);
                            throw new ItemStateException(msg, e);
                        }
                    } else {
                        // non-empty non-STRING non-BINARY value
                        values.add(InternalValue.valueOf(content, type));
                    }
                } else {
                    // empty non-STRING value: cannot be represented, skip it with a warning
                    log.warn(state.getPropertyId() + ": ignoring empty value of type "
                            + PropertyType.nameFromValue(type));
                }
            }
            walker.leaveElement();
        }
        state.setValues((InternalValue[]) values.toArray(new InternalValue[values.size()]));
    }

    /**
     * Deserializes a {@code NodeReferences} instance from the element the
     * given walker is positioned on. Performs the same paranoid element-name
     * and target-id checks as the other readState overloads before replacing
     * the reference list wholesale.
     *
     * @param walker DOM walker positioned on the node-references element
     * @param refs   references instance to populate (cleared first)
     * @throws ItemStateException if the serialization format is invalid
     */
    private void readState(DOMWalker walker, NodeReferences refs)
            throws ItemStateException {
        // first do some paranoid sanity checks
        if (!walker.getName().equals(NODEREFERENCES_ELEMENT)) {
            String msg = "invalid serialization format (unexpected element: "
                    + walker.getName() + ")";
            log.debug(msg);
            throw new ItemStateException(msg);
        }

        // check targetId
        if (!refs.getTargetId().equals(NodeId.valueOf(walker.getAttribute(TARGETID_ATTRIBUTE)))) {
            String msg = "invalid serialized state: targetId mismatch";
            log.debug(msg);
            throw new ItemStateException(msg);
        }

        // now we're ready to read the references data

        // property id's
        refs.clearAllReferences();
        while (walker.iterateElements(NODEREFERENCE_ELEMENT)) {
            refs.addReference(PropertyId.valueOf(walker.getAttribute(PROPERTYID_ATTRIBUTE)));
        }
    }

    //---------------------------------------------------< PersistenceManager >

    /**
     * {@inheritDoc}
     *
     * Sets up the item-state file system under "/data" of the workspace file
     * system and a local-file-system-backed BLOB store under the workspace
     * home directory.
     */
    public void init(PMContext context) throws Exception {
        if (initialized) {
            throw new IllegalStateException("already initialized");
        }

        itemStateFS = new BasedFileSystem(context.getFileSystem(), "/data");

        /*
         * store BLOB data in local file system in a sub directory
         * of the workspace home directory
         */
        LocalFileSystem blobFS = new LocalFileSystem();
        blobFS.setRoot(new File(context.getHomeDir(), "blobs"));
        blobFS.init();
        this.blobFS = blobFS;
        blobStore = new FileSystemBLOBStore(blobFS);

        initialized = true;
    }

    /**
     * {@inheritDoc}
     *
     * Closes only the BLOB file system; the flag is reset even if that fails.
     */
    public synchronized void close() throws Exception {
        if (!initialized) {
            throw new IllegalStateException("not initialized");
        }

        try {
            // close BLOB file system
            blobFS.close();
            blobFS = null;
            blobStore = null;
            /*
             * there's no need close
* the item state store because it
             * is based in the workspace's file system which is
             * closed by the repository
             */
        } finally {
            initialized = false;
        }
    }

    /**
     * {@inheritDoc}
     *
     * Reads the node's XML serialization from the item-state file system and
     * materializes a fresh {@code NodeState}. IO and file-system failures are
     * collected and rethrown as a single {@code ItemStateException}.
     */
    public synchronized NodeState load(NodeId id)
            throws NoSuchItemStateException, ItemStateException {
        if (!initialized) {
            throw new IllegalStateException("not initialized");
        }

        Exception e = null;
        String nodeFilePath = buildNodeFilePath(id);

        try {
            if (!itemStateFS.isFile(nodeFilePath)) {
                throw new NoSuchItemStateException(id.toString());
            }
            InputStream in = itemStateFS.getInputStream(nodeFilePath);
            try {
                DOMWalker walker = new DOMWalker(in);
                String ntName = walker.getAttribute(NODETYPE_ATTRIBUTE);
                NodeState state = createNew(id);
                state.setNodeTypeName(factory.create(ntName));
                readState(walker, state);
                return state;
            } finally {
                in.close();
            }
        } catch (IOException ioe) {
            e = ioe;
            // fall through
        } catch (FileSystemException fse) {
            e = fse;
            // fall through
        }
        String msg = "failed to read node state: " + id;
        log.debug(msg);
        throw new ItemStateException(msg, e);
    }

    /**
     * {@inheritDoc}
     *
     * Same pattern as {@code load(NodeId)}, but for a property's XML file.
     */
    public synchronized PropertyState load(PropertyId id)
            throws NoSuchItemStateException, ItemStateException {
        if (!initialized) {
            throw new IllegalStateException("not initialized");
        }

        Exception e = null;
        String propFilePath = buildPropFilePath(id);

        try {
            if (!itemStateFS.isFile(propFilePath)) {
                throw new NoSuchItemStateException(id.toString());
            }
            InputStream in = itemStateFS.getInputStream(propFilePath);
            try {
                DOMWalker walker = new DOMWalker(in);
                PropertyState state = createNew(id);
                readState(walker, state);
                return state;
            } finally {
                in.close();
            }
        } catch (IOException ioe) {
            e = ioe;
            // fall through
        } catch (FileSystemException fse) {
            e = fse;
            // fall through
        }
        String msg = "failed to read property state: " + id.toString();
        log.debug(msg);
        throw new ItemStateException(msg, e);
    }

    /**
     * {@inheritDoc}
     *
     * Serializes the node state as a small XML document (mixins, property
     * names, child-node entries) into the item-state file system.
     */
    protected void store(NodeState state) throws ItemStateException {
        if (!initialized) {
            throw new IllegalStateException("not initialized");
        }

        NodeId id = state.getNodeId();
        String nodeFilePath = buildNodeFilePath(id);
        FileSystemResource nodeFile = new FileSystemResource(itemStateFS, nodeFilePath);
        try {
            nodeFile.makeParentDirs();
            OutputStream os = nodeFile.getOutputStream();
            Writer writer = null;
            try {
                writer = new BufferedWriter(new OutputStreamWriter(os, DEFAULT_ENCODING));

                String parentId = (state.getParentId() == null) ? "" : state.getParentId().toString();
                String encodedNodeType = Text.encodeIllegalXMLCharacters(state.getNodeTypeName().toString());
                writer.write("<?xml version=\"1.0\" encoding=\"" + DEFAULT_ENCODING.name() + "\"?>\n");
                writer.write("<" + NODE_ELEMENT + " "
                        + UUID_ATTRIBUTE + "=\"" + id + "\" "
                        + PARENTUUID_ATTRIBUTE + "=\"" + parentId + "\" "
                        + MODCOUNT_ATTRIBUTE + "=\"" + state.getModCount() + "\" "
                        + NODETYPE_ATTRIBUTE + "=\"" + encodedNodeType + "\">\n");

                // mixin types
                writer.write("\t<" + MIXINTYPES_ELEMENT + ">\n");
                for (Name mixin : state.getMixinTypeNames()) {
                    writer.write("\t\t<" + MIXINTYPE_ELEMENT + " "
                            + NAME_ATTRIBUTE + "=\"" + Text.encodeIllegalXMLCharacters(mixin.toString()) + "\"/>\n");
                }
                writer.write("\t</" + MIXINTYPES_ELEMENT + ">\n");

                // properties
                writer.write("\t<" + PROPERTIES_ELEMENT + ">\n");
                for (Name propName : state.getPropertyNames()) {
                    writer.write("\t\t<" + PROPERTY_ELEMENT + " "
                            + NAME_ATTRIBUTE + "=\"" + Text.encodeIllegalXMLCharacters(propName.toString()) + "\">\n");
                    // @todo serialize type, definition id and values
                    writer.write("\t\t</" + PROPERTY_ELEMENT + ">\n");
                }
                writer.write("\t</" + PROPERTIES_ELEMENT + ">\n");

                // child nodes
                writer.write("\t<" + NODES_ELEMENT + ">\n");
                for (ChildNodeEntry entry : state.getChildNodeEntries()) {
                    writer.write("\t\t<" + NODE_ELEMENT + " "
                            + NAME_ATTRIBUTE + "=\"" + Text.encodeIllegalXMLCharacters(entry.getName().toString()) + "\" "
                            + UUID_ATTRIBUTE + "=\"" + entry.getId() + "\">\n");
                    writer.write("\t\t</" + NODE_ELEMENT + ">\n");
                }
                writer.write("\t</" + NODES_ELEMENT + ">\n");

                writer.write("</" + NODE_ELEMENT + ">\n");
            } finally {
                writer.close();
            }
        } catch (Exception e) {
            String msg = "failed to write node state: " + id;
            log.debug(msg);
            throw new ItemStateException(msg, e);
        }
    }

    /**
     * {@inheritDoc}
     *
     * Serializes the property state to XML. BINARY values are written to the
     * BLOB store and only the resulting BLOB id is stored inline.
     */
    protected void store(PropertyState state) throws ItemStateException {
        if (!initialized) {
            throw new IllegalStateException("not initialized");
        }

        String propFilePath = buildPropFilePath(state.getPropertyId());
        FileSystemResource propFile = new FileSystemResource(itemStateFS, propFilePath);
        try {
            propFile.makeParentDirs();
            OutputStream os = propFile.getOutputStream();
            // write property state to xml file
            Writer writer = null;
            try {
                writer = new BufferedWriter(new OutputStreamWriter(os, DEFAULT_ENCODING));

                String typeName;
                int type = state.getType();
                try {
                    typeName = PropertyType.nameFromValue(type);
                } catch (IllegalArgumentException iae) {
                    // should never be getting here
                    throw new ItemStateException("unexpected property-type ordinal: " + type, iae);
                }

                writer.write("<?xml version=\"1.0\" encoding=\"" + DEFAULT_ENCODING.name() + "\"?>\n");
                writer.write("<" + PROPERTY_ELEMENT + " "
                        + NAME_ATTRIBUTE + "=\"" + Text.encodeIllegalXMLCharacters(state.getName().toString()) + "\" "
                        + PARENTUUID_ATTRIBUTE + "=\"" + state.getParentId() + "\" "
                        + MULTIVALUED_ATTRIBUTE + "=\"" + Boolean.toString(state.isMultiValued()) + "\" "
                        + MODCOUNT_ATTRIBUTE + "=\"" + state.getModCount() + "\" "
                        + TYPE_ATTRIBUTE + "=\"" + typeName + "\">\n");

                // values
                writer.write("\t<" + VALUES_ELEMENT + ">\n");
                InternalValue[] values = state.getValues();
                if (values != null) {
                    for (int i = 0; i < values.length; i++) {
                        writer.write("\t\t<" + VALUE_ELEMENT + ">");
                        InternalValue val = values[i];
                        if (val != null) {
                            if (type == PropertyType.BINARY) {
                                // special handling required for binary value:
                                // put binary value in BLOB store
                                InputStream in = val.getStream();
                                String blobId = blobStore.createId(state.getPropertyId(), i);
                                try {
                                    blobStore.put(blobId, in, val.getLength());
                                } finally {
                                    IOUtils.closeQuietly(in);
} // store id of BLOB as property value writer.write(blobId); // replace value instance with value backed by resource // in BLOB store and discard old value instance (e.g. temp file) if (blobStore instanceof ResourceBasedBLOBStore) { // optimization: if the BLOB store is resource-based // retrieve the resource directly rather than having // to read the BLOB from an input stream FileSystemResource fsRes = ((ResourceBasedBLOBStore) blobStore).getResource(blobId); values[i] = InternalValue.create(fsRes); } else { in = blobStore.get(blobId); try { values[i] = InternalValue.create(in); } finally { try { in.close(); } catch (IOException e) { // ignore } } } val.discard(); } else { writer.write(Text.encodeIllegalXMLCharacters(val.toString())); } } writer.write("</" + VALUE_ELEMENT + ">\n"); } } writer.write("\t</" + VALUES_ELEMENT + ">\n"); writer.write("</" + PROPERTY_ELEMENT + ">\n"); } finally { writer.close(); } } catch (Exception e) { String msg = "failed to store property state: " + state.getParentId() + "/" + state.getName(); log.debug(msg); throw new ItemStateException(msg, e); } } /** * {@inheritDoc} */ protected void destroy(NodeState state) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } NodeId id = state.getNodeId(); String nodeFilePath = buildNodeFilePath(id); FileSystemResource nodeFile = new FileSystemResource(itemStateFS, nodeFilePath); try { if (nodeFile.exists()) { // delete resource and prune empty parent folders nodeFile.delete(true); } } catch (FileSystemException fse) { String msg = "failed to delete node state: " + id; log.debug(msg); throw new ItemStateException(msg, fse); } } /** * {@inheritDoc} */ protected void destroy(PropertyState state) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } // delete binary values (stored as files) InternalValue[] values = state.getValues(); if (values != null) { for (int i = 0; i < values.length; i++) { 
InternalValue val = values[i]; if (val != null) { val.deleteBinaryResource(); } } } // delete property file String propFilePath = buildPropFilePath(state.getPropertyId()); FileSystemResource propFile = new FileSystemResource(itemStateFS, propFilePath); try { if (propFile.exists()) { // delete resource and prune empty parent folders propFile.delete(true); } } catch (FileSystemException fse) { String msg = "failed to delete property state: " + state.getParentId() + "/" + state.getName(); log.debug(msg); throw new ItemStateException(msg, fse); } } /** * {@inheritDoc} */ public synchronized NodeReferences loadReferencesTo(NodeId id) throws NoSuchItemStateException, ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } Exception e = null; String refsFilePath = buildNodeReferencesFilePath(id); try { if (!itemStateFS.isFile(refsFilePath)) { throw new NoSuchItemStateException(id.toString()); } InputStream in = itemStateFS.getInputStream(refsFilePath); try { DOMWalker walker = new DOMWalker(in); NodeReferences refs = new NodeReferences(id); readState(walker, refs); return refs; } finally { in.close(); } } catch (IOException ioe) { e = ioe; // fall through } catch (FileSystemException fse) { e = fse; // fall through } String msg = "failed to load references: " + id; log.debug(msg); throw new ItemStateException(msg, e); } /** * {@inheritDoc} */ protected void store(NodeReferences refs) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } String refsFilePath = buildNodeReferencesFilePath(refs.getTargetId()); FileSystemResource refsFile = new FileSystemResource(itemStateFS, refsFilePath); try { refsFile.makeParentDirs(); OutputStream os = refsFile.getOutputStream(); BufferedWriter writer = null; try { writer = new BufferedWriter(new OutputStreamWriter(os, DEFAULT_ENCODING)); writer.write("<?xml version=\"1.0\" encoding=\"" + DEFAULT_ENCODING.name() + "\"?>\n"); writer.write("<" + 
NODEREFERENCES_ELEMENT + " " + TARGETID_ATTRIBUTE + "=\"" + refs.getTargetId() + "\">\n"); // write references (i.e. the id's of the REFERENCE properties) for (PropertyId propId : refs.getReferences()) { writer.write("\t<" + NODEREFERENCE_ELEMENT + " " + PROPERTYID_ATTRIBUTE + "=\"" + propId + "\"/>\n"); } writer.write("</" + NODEREFERENCES_ELEMENT + ">\n"); } finally { writer.close(); } } catch (Exception e) { String msg = "failed to store " + refs; log.debug(msg); throw new ItemStateException(msg, e); } } /** * {@inheritDoc} */ protected void destroy(NodeReferences refs) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } String refsFilePath = buildNodeReferencesFilePath(refs.getTargetId()); FileSystemResource refsFile = new FileSystemResource(itemStateFS, refsFilePath); try { if (refsFile.exists()) { // delete resource and prune empty parent folders refsFile.delete(true); } } catch (FileSystemException fse) { String msg = "failed to delete " + refs; log.debug(msg); throw new ItemStateException(msg, fse); } } /** * {@inheritDoc} */ public synchronized boolean exists(NodeId id) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } try { String nodeFilePath = buildNodeFilePath(id); FileSystemResource nodeFile = new FileSystemResource(itemStateFS, nodeFilePath); return nodeFile.exists(); } catch (FileSystemException fse) { String msg = "failed to check existence of item state: " + id; log.debug(msg); throw new ItemStateException(msg, fse); } } /** * {@inheritDoc} */ public synchronized boolean exists(PropertyId id) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } try { String propFilePath = buildPropFilePath(id); FileSystemResource propFile = new FileSystemResource(itemStateFS, propFilePath); return propFile.exists(); } catch (FileSystemException fse) { String msg = "failed to check existence of item state: " + id; 
log.error(msg, fse); throw new ItemStateException(msg, fse); } } /** * {@inheritDoc} */ public synchronized boolean existsReferencesTo(NodeId id) throws ItemStateException { if (!initialized) { throw new IllegalStateException("not initialized"); } try { String refsFilePath = buildNodeReferencesFilePath(id); FileSystemResource refsFile = new FileSystemResource(itemStateFS, refsFilePath); return refsFile.exists(); } catch (FileSystemException fse) { String msg = "failed to check existence of references: " + id; log.debug(msg); throw new ItemStateException(msg, fse); } } }
apache/nifi
36,208
nifi-extension-bundles/nifi-kafka-bundle/nifi-kafka-processors/src/main/java/org/apache/nifi/kafka/processors/PublishKafka.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.nifi.kafka.processors; import org.apache.nifi.annotation.behavior.InputRequirement; import org.apache.nifi.annotation.behavior.ReadsAttribute; import org.apache.nifi.annotation.behavior.WritesAttribute; import org.apache.nifi.annotation.documentation.CapabilityDescription; import org.apache.nifi.annotation.documentation.SeeAlso; import org.apache.nifi.annotation.documentation.Tags; import org.apache.nifi.annotation.lifecycle.OnScheduled; import org.apache.nifi.annotation.lifecycle.OnStopped; import org.apache.nifi.components.ConfigVerificationResult; import org.apache.nifi.components.PropertyDescriptor; import org.apache.nifi.components.PropertyValue; import org.apache.nifi.expression.ExpressionLanguageScope; import org.apache.nifi.flowfile.FlowFile; import org.apache.nifi.flowfile.attributes.CoreAttributes; import org.apache.nifi.kafka.processors.producer.PartitionStrategy; import org.apache.nifi.kafka.processors.producer.common.PublishKafkaUtil; import org.apache.nifi.kafka.processors.producer.config.DeliveryGuarantee; import org.apache.nifi.kafka.processors.producer.convert.DelimitedStreamKafkaRecordConverter; import 
org.apache.nifi.kafka.processors.producer.convert.FlowFileStreamKafkaRecordConverter; import org.apache.nifi.kafka.processors.producer.convert.KafkaRecordConverter; import org.apache.nifi.kafka.processors.producer.convert.RecordStreamKafkaRecordConverter; import org.apache.nifi.kafka.processors.producer.convert.RecordWrapperStreamKafkaRecordConverter; import org.apache.nifi.kafka.processors.producer.header.AttributesHeadersFactory; import org.apache.nifi.kafka.processors.producer.header.HeadersFactory; import org.apache.nifi.kafka.processors.producer.key.AttributeKeyFactory; import org.apache.nifi.kafka.processors.producer.key.KeyFactory; import org.apache.nifi.kafka.processors.producer.key.MessageKeyFactory; import org.apache.nifi.kafka.processors.producer.wrapper.RecordMetadataStrategy; import org.apache.nifi.kafka.service.api.KafkaConnectionService; import org.apache.nifi.kafka.service.api.common.PartitionState; import org.apache.nifi.kafka.service.api.producer.FlowFileResult; import org.apache.nifi.kafka.service.api.producer.KafkaProducerService; import org.apache.nifi.kafka.service.api.producer.KafkaRecordPartitioner; import org.apache.nifi.kafka.service.api.producer.ProducerConfiguration; import org.apache.nifi.kafka.service.api.producer.PublishContext; import org.apache.nifi.kafka.service.api.producer.RecordSummary; import org.apache.nifi.kafka.service.api.record.KafkaRecord; import org.apache.nifi.kafka.shared.attribute.KafkaFlowFileAttribute; import org.apache.nifi.kafka.shared.property.FailureStrategy; import org.apache.nifi.kafka.shared.property.KeyEncoding; import org.apache.nifi.kafka.shared.property.PublishStrategy; import org.apache.nifi.kafka.shared.transaction.TransactionIdSupplier; import org.apache.nifi.logging.ComponentLog; import org.apache.nifi.processor.AbstractProcessor; import org.apache.nifi.processor.DataUnit; import org.apache.nifi.processor.ProcessContext; import org.apache.nifi.processor.ProcessSession; import 
org.apache.nifi.processor.Relationship; import org.apache.nifi.processor.VerifiableProcessor; import org.apache.nifi.processor.exception.ProcessException; import org.apache.nifi.processor.io.InputStreamCallback; import org.apache.nifi.processor.util.StandardValidators; import org.apache.nifi.serialization.RecordReaderFactory; import org.apache.nifi.serialization.RecordSetWriterFactory; import java.io.InputStream; import java.nio.charset.Charset; import java.nio.charset.StandardCharsets; import java.util.ArrayList; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.Map; import java.util.Objects; import java.util.Queue; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentMap; import java.util.concurrent.LinkedBlockingQueue; import java.util.concurrent.atomic.AtomicLong; import java.util.function.Supplier; import java.util.regex.Pattern; import java.util.stream.Collectors; @Tags({"Apache", "Kafka", "Record", "csv", "json", "avro", "logs", "Put", "Send", "Message", "PubSub"}) @CapabilityDescription("Sends the contents of a FlowFile as either a message or as individual records to Apache Kafka using the Kafka Producer API. " + "The messages to send may be individual FlowFiles, may be delimited using a " + "user-specified delimiter (such as a new-line), or " + "may be record-oriented data that can be read by the configured Record Reader. " + "The complementary NiFi processor for fetching messages is ConsumeKafka. 
" + "To produce a kafka tombstone message while using PublishStrategy.USE_WRAPPER, simply set the value of a record to 'null'.") @InputRequirement(InputRequirement.Requirement.INPUT_REQUIRED) @ReadsAttribute(attribute = KafkaFlowFileAttribute.KAFKA_TOMBSTONE, description = "If this attribute is set to 'true', if the processor is not configured " + "with a demarcator and if the FlowFile's content is null, then a tombstone message with zero bytes will be sent to Kafka.") @WritesAttribute(attribute = "msg.count", description = "The number of messages that were sent to Kafka for this FlowFile. This attribute is added only to " + "FlowFiles that are routed to success.") @SeeAlso({ConsumeKafka.class}) public class PublishKafka extends AbstractProcessor implements VerifiableProcessor { protected static final String MSG_COUNT = "msg.count"; public static final PropertyDescriptor CONNECTION_SERVICE = new PropertyDescriptor.Builder() .name("Kafka Connection Service") .description("Provides connections to Kafka Broker for publishing Kafka Records") .identifiesControllerService(KafkaConnectionService.class) .expressionLanguageSupported(ExpressionLanguageScope.NONE) .required(true) .build(); public static final PropertyDescriptor TOPIC_NAME = new PropertyDescriptor.Builder() .name("Topic Name") .description("Name of the Kafka Topic to which the Processor publishes Kafka Records") .required(true) .addValidator(StandardValidators.NON_BLANK_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); public static final PropertyDescriptor FAILURE_STRATEGY = new PropertyDescriptor.Builder() .name("Failure Strategy") .description("Specifies how the processor handles a FlowFile if it is unable to publish the data to Kafka") .required(true) .allowableValues(FailureStrategy.class) .defaultValue(FailureStrategy.ROUTE_TO_FAILURE) .build(); static final PropertyDescriptor DELIVERY_GUARANTEE = new PropertyDescriptor.Builder() .name("acks") 
.displayName("Delivery Guarantee") .description("Specifies the requirement for guaranteeing that a message is sent to Kafka. Corresponds to Kafka Client acks property.") .required(true) .expressionLanguageSupported(ExpressionLanguageScope.NONE) .allowableValues(DeliveryGuarantee.class) .defaultValue(DeliveryGuarantee.DELIVERY_REPLICATED) .build(); static final PropertyDescriptor COMPRESSION_CODEC = new PropertyDescriptor.Builder() .name("compression.type") .displayName("Compression Type") .description("Specifies the compression strategy for records sent to Kafka. Corresponds to Kafka Client compression.type property.") .required(true) .allowableValues("none", "gzip", "snappy", "lz4", "zstd") .defaultValue("none") .build(); public static final PropertyDescriptor MAX_REQUEST_SIZE = new PropertyDescriptor.Builder() .name("max.request.size") .displayName("Max Request Size") .description("The maximum size of a request in bytes. Corresponds to Kafka Client max.request.size property.") .required(true) .addValidator(StandardValidators.DATA_SIZE_VALIDATOR) .defaultValue("1 MB") .build(); public static final PropertyDescriptor TRANSACTIONS_ENABLED = new PropertyDescriptor.Builder() .name("Transactions Enabled") .description("Specifies whether to provide transactional guarantees when communicating with Kafka. If there is a problem sending data to Kafka, " + "and this property is set to false, then the messages that have already been sent to Kafka will continue on and be delivered to consumers. " + "If this is set to true, then the Kafka transaction will be rolled back so that those messages are not available to consumers. 
Setting this to true " + "requires that the [Delivery Guarantee] property be set to [Guarantee Replicated Delivery.]") .expressionLanguageSupported(ExpressionLanguageScope.NONE) .allowableValues("true", "false") .defaultValue("true") .required(true) .build(); static final PropertyDescriptor TRANSACTIONAL_ID_PREFIX = new PropertyDescriptor.Builder() .name("Transactional ID Prefix") .description("Specifies the KafkaProducer config transactional.id will be a generated UUID and will be prefixed with the configured string.") .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .addValidator(StandardValidators.NON_EMPTY_EL_VALIDATOR) .dependsOn(TRANSACTIONS_ENABLED, "true") .required(false) .build(); static final PropertyDescriptor PARTITION_CLASS = new PropertyDescriptor.Builder() .name("partitioner.class") .displayName("Partitioner Class") .description("Specifies which class to use to compute a partition id for a message. Corresponds to Kafka Client partitioner.class property.") .allowableValues(PartitionStrategy.class) .defaultValue(PartitionStrategy.DEFAULT_PARTITIONER.getValue()) .required(true) .build(); public static final PropertyDescriptor PARTITION = new PropertyDescriptor.Builder() .name("partition") .displayName("Partition") .description("Specifies the Kafka Partition destination for Records.") .required(false) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); static final PropertyDescriptor MESSAGE_DEMARCATOR = new PropertyDescriptor.Builder() .name("Message Demarcator") .required(false) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .description("Specifies the string (interpreted as UTF-8) to use for demarcating multiple messages within " + "a single FlowFile. If not specified, the entire content of the FlowFile will be used as a single message. 
If specified, the " + "contents of the FlowFile will be split on this delimiter and each section sent as a separate Kafka message. " + "To enter special character such as 'new line' use CTRL+Enter or Shift+Enter, depending on your OS.") .build(); static final PropertyDescriptor RECORD_READER = new PropertyDescriptor.Builder() .name("Record Reader") .description("The Record Reader to use for incoming FlowFiles") .identifiesControllerService(RecordReaderFactory.class) .expressionLanguageSupported(ExpressionLanguageScope.NONE) .build(); static final PropertyDescriptor RECORD_WRITER = new PropertyDescriptor.Builder() .name("Record Writer") .description("The Record Writer to use in order to serialize the data before sending to Kafka") .identifiesControllerService(RecordSetWriterFactory.class) .expressionLanguageSupported(ExpressionLanguageScope.NONE) .build(); public static final PropertyDescriptor PUBLISH_STRATEGY = new PropertyDescriptor.Builder() .name("Publish Strategy") .description("The format used to publish the incoming FlowFile record to Kafka.") .required(true) .defaultValue(PublishStrategy.USE_VALUE) .dependsOn(RECORD_READER) .allowableValues(PublishStrategy.class) .build(); public static final PropertyDescriptor MESSAGE_KEY_FIELD = new PropertyDescriptor.Builder() .name("Message Key Field") .description("The name of a field in the Input Records that should be used as the Key for the Kafka message.") .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .dependsOn(PUBLISH_STRATEGY, PublishStrategy.USE_VALUE) .required(false) .build(); public static final PropertyDescriptor ATTRIBUTE_HEADER_PATTERN = new PropertyDescriptor.Builder() .name("FlowFile Attribute Header Pattern") .description("A Regular Expression that is matched against all FlowFile attribute names. " + "Any attribute whose name matches the pattern will be added to the Kafka messages as a Header. 
" + "If not specified, no FlowFile attributes will be added as headers.") .addValidator(StandardValidators.REGULAR_EXPRESSION_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.NONE) .required(false) .build(); static final PropertyDescriptor HEADER_ENCODING = new PropertyDescriptor.Builder() .name("Header Encoding") .description("For any attribute that is added as a Kafka Record Header, this property indicates the Character Encoding to use for serializing the headers.") .addValidator(StandardValidators.CHARACTER_SET_VALIDATOR) .defaultValue(StandardCharsets.UTF_8.displayName()) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .required(true) .dependsOn(ATTRIBUTE_HEADER_PATTERN) .build(); static final PropertyDescriptor KAFKA_KEY = new PropertyDescriptor.Builder() .name("Kafka Key") .description("The Key to use for the Message. " + "If not specified, the FlowFile attribute 'kafka.key' is used as the message key, if it is present." + "Beware that setting Kafka key and demarcating at the same time may potentially lead to many Kafka messages with the same key." + "Normally this is not a problem as Kafka does not enforce or assume message and key uniqueness. Still, setting the demarcator and Kafka key at the same time poses a risk of " + "data loss on Kafka. During a topic compaction on Kafka, messages will be deduplicated based on this key.") .required(false) .addValidator(StandardValidators.NON_EMPTY_VALIDATOR) .expressionLanguageSupported(ExpressionLanguageScope.FLOWFILE_ATTRIBUTES) .build(); static final PropertyDescriptor KEY_ATTRIBUTE_ENCODING = new PropertyDescriptor.Builder() .name("Kafka Key Attribute Encoding") .description("FlowFiles that are emitted have an attribute named '" + KafkaFlowFileAttribute.KAFKA_KEY + "'. 
This property dictates how the value of the attribute should be encoded.") .required(true) .defaultValue(KeyEncoding.UTF8) .allowableValues(KeyEncoding.class) .build(); static final PropertyDescriptor RECORD_KEY_WRITER = new PropertyDescriptor.Builder() .name("Record Key Writer") .description("The Record Key Writer to use for outgoing FlowFiles") .required(false) .identifiesControllerService(RecordSetWriterFactory.class) .build(); public static final PropertyDescriptor RECORD_METADATA_STRATEGY = new PropertyDescriptor.Builder() .name("Record Metadata Strategy") .description("Specifies whether the Record's metadata (topic and partition) should come from the Record's metadata field or if it should come from the configured " + "Topic Name and Partition / Partitioner class properties") .required(true) .defaultValue(RecordMetadataStrategy.FROM_PROPERTIES) .allowableValues(RecordMetadataStrategy.class) .dependsOn(PUBLISH_STRATEGY, PublishStrategy.USE_WRAPPER) .build(); public static final Relationship REL_SUCCESS = new Relationship.Builder() .name("success") .description("FlowFiles for which all content was sent to Kafka.") .build(); public static final Relationship REL_FAILURE = new Relationship.Builder() .name("failure") .description("Any FlowFile that cannot be sent to Kafka will be routed to this Relationship") .build(); private static final List<PropertyDescriptor> PROPERTY_DESCRIPTORS = List.of( CONNECTION_SERVICE, TOPIC_NAME, FAILURE_STRATEGY, DELIVERY_GUARANTEE, COMPRESSION_CODEC, MAX_REQUEST_SIZE, TRANSACTIONS_ENABLED, TRANSACTIONAL_ID_PREFIX, PARTITION_CLASS, PARTITION, MESSAGE_DEMARCATOR, RECORD_READER, RECORD_WRITER, PUBLISH_STRATEGY, MESSAGE_KEY_FIELD, ATTRIBUTE_HEADER_PATTERN, HEADER_ENCODING, KAFKA_KEY, KEY_ATTRIBUTE_ENCODING, RECORD_KEY_WRITER, RECORD_METADATA_STRATEGY ); private static final Set<Relationship> RELATIONSHIPS = Set.of( REL_SUCCESS, REL_FAILURE ); private final Queue<KafkaProducerService> producerServices = new LinkedBlockingQueue<>(); 
private volatile KafkaRecordPartitioner partitioner;
    private volatile String brokerUri;

    @Override
    public List<PropertyDescriptor> getSupportedPropertyDescriptors() {
        return PROPERTY_DESCRIPTORS;
    }

    @Override
    public Set<Relationship> getRelationships() {
        return RELATIONSHIPS;
    }

    /**
     * Verifies the processor configuration by creating a short-lived producer and querying the
     * partition state of the configured topic. The producer is closed via try-with-resources.
     */
    @Override
    public List<ConfigVerificationResult> verify(final ProcessContext context, final ComponentLog verificationLogger, final Map<String, String> attributes) {
        final List<ConfigVerificationResult> verificationResults = new ArrayList<>();

        final KafkaConnectionService connectionService = context.getProperty(CONNECTION_SERVICE).asControllerService(KafkaConnectionService.class);

        final boolean transactionsEnabled = context.getProperty(TRANSACTIONS_ENABLED).asBoolean();
        final String transactionalIdPrefix = transactionsEnabled ? context.getProperty(TRANSACTIONAL_ID_PREFIX).evaluateAttributeExpressions().getValue() : null;
        final Supplier<String> transactionalIdSupplier = new TransactionIdSupplier(transactionalIdPrefix);
        final String deliveryGuarantee = context.getProperty(DELIVERY_GUARANTEE).getValue();
        final String compressionCodec = context.getProperty(COMPRESSION_CODEC).getValue();
        final String partitionClass = context.getProperty(PARTITION_CLASS).getValue();
        final int maxRequestSize = context.getProperty(MAX_REQUEST_SIZE).asDataSize(DataUnit.B).intValue();
        final ProducerConfiguration producerConfiguration = new ProducerConfiguration(
                transactionsEnabled, transactionalIdSupplier.get(), deliveryGuarantee, compressionCodec, partitionClass, maxRequestSize);

        try (final KafkaProducerService producerService = connectionService.getProducerService(producerConfiguration)) {
            final ConfigVerificationResult.Builder verificationPartitions = new ConfigVerificationResult.Builder()
                    .verificationStepName("Verify Topic Partitions");

            final String topicName = context.getProperty(TOPIC_NAME).evaluateAttributeExpressions(attributes).getValue();
            try {
                final List<PartitionState> partitionStates = producerService.getPartitionStates(topicName);
                verificationPartitions
                        .outcome(ConfigVerificationResult.Outcome.SUCCESSFUL)
                        .explanation(String.format("Partitions [%d] found for Topic [%s]", partitionStates.size(), topicName));
            } catch (final Exception e) {
                // BUGFIX: ComponentLog uses SLF4J-style {} placeholders (as every other log call in
                // this class does); the previous "%s" placeholder was never substituted.
                getLogger().error("Topic [{}] Partition verification failed", topicName, e);
                verificationPartitions
                        .outcome(ConfigVerificationResult.Outcome.FAILED)
                        .explanation(String.format("Topic [%s] Partition access failed: %s", topicName, e));
            }
            verificationResults.add(verificationPartitions.build());

            return verificationResults;
        }
    }

    /** Closes and discards every pooled producer service when the processor is stopped. */
    @OnStopped
    public void onStopped() {
        // Ensure that we close all Producer services when stopped
        KafkaProducerService service;

        while ((service = producerServices.poll()) != null) {
            service.close();
        }
    }

    /**
     * Resolves the partitioner implementation from the configured partitioning strategy and caches
     * the broker URI for provenance reporting.
     */
    @OnScheduled
    public void onScheduled(final ProcessContext context) {
        final String partitionClass = context.getProperty(PARTITION_CLASS).getValue();
        if (partitionClass.equalsIgnoreCase(PartitionStrategy.ROUND_ROBIN_PARTITIONING.getValue())) {
            partitioner = new RoundRobinPartitioner();
        } else if (partitionClass.equalsIgnoreCase(PartitionStrategy.EXPRESSION_LANGUAGE_PARTITIONING.getValue())) {
            partitioner = new ExpressionLanguagePartitioner(context.getProperty(PARTITION));
        } else {
            // null partitioner delegates partition selection to the underlying producer
            partitioner = null;
        }

        final KafkaConnectionService connectionService = context.getProperty(CONNECTION_SERVICE).asControllerService(KafkaConnectionService.class);
        brokerUri = connectionService.getBrokerUri();
    }

    @Override
    public void onTrigger(final ProcessContext context, final ProcessSession session) throws ProcessException {
        final List<FlowFile> flowFiles = PublishKafkaUtil.pollFlowFiles(session);
        if (flowFiles.isEmpty()) {
            return;
        }

        final KafkaProducerService producerService = getProducerService(context);
        try {
            publishFlowFiles(context, session, flowFiles, producerService);
        } catch (final Exception e) {
            final String uuids = flowFiles.stream()
                .map(ff -> ff.getAttribute(CoreAttributes.UUID.key()))
                .collect(Collectors.joining(", "));
            getLogger().error("Failed to publish {} FlowFiles to Kafka: uuids={}", flowFiles.size(), uuids, e);
            producerService.close();
        } finally {
            // Return the producer to the pool only if it survived this invocation.
            if (!producerService.isClosed()) {
                producerServices.offer(producerService);
            }
        }
    }

    /** Takes a pooled producer service, or creates a new one if the pool is empty. */
    private KafkaProducerService getProducerService(final ProcessContext context) {
        final KafkaProducerService producerService = producerServices.poll();
        if (producerService != null) {
            return producerService;
        }

        return createProducerService(context);
    }

    /** Builds a new producer service from the current processor configuration. */
    private KafkaProducerService createProducerService(final ProcessContext context) {
        final KafkaConnectionService connectionService = context.getProperty(CONNECTION_SERVICE).asControllerService(KafkaConnectionService.class);

        final boolean transactionsEnabled = context.getProperty(TRANSACTIONS_ENABLED).asBoolean();
        final String transactionalIdPrefix = transactionsEnabled ? context.getProperty(TRANSACTIONAL_ID_PREFIX).evaluateAttributeExpressions().getValue() : null;
        final String deliveryGuarantee = context.getProperty(DELIVERY_GUARANTEE).getValue();
        final String compressionCodec = context.getProperty(COMPRESSION_CODEC).getValue();
        final String partitionClass = context.getProperty(PARTITION_CLASS).getValue();
        final int maxRequestSize = context.getProperty(MAX_REQUEST_SIZE).asDataSize(DataUnit.B).intValue();
        final ProducerConfiguration producerConfiguration = new ProducerConfiguration(
                transactionsEnabled, transactionalIdPrefix, deliveryGuarantee, compressionCodec, partitionClass, maxRequestSize);
        return connectionService.getProducerService(producerConfiguration);
    }

    /**
     * Publishes the given FlowFiles, completes (commits or aborts) the producer transaction, and
     * routes each FlowFile according to the per-record results and the configured failure strategy.
     */
    private void publishFlowFiles(final ProcessContext context, final ProcessSession session, final List<FlowFile> flowFiles, final KafkaProducerService producerService) {
        // Publish all FlowFiles and ensure that we call complete() on the producer and route flowfiles as appropriate, regardless
        // of the outcome. If there are failures, the complete() method will abort the transaction (if transactions are enabled).
        // Otherwise, it will commit the transaction (if transactions are enabled). We then route the FlowFiles based on the results.
        try {
            for (final FlowFile flowFile : flowFiles) {
                publishFlowFile(context, session, flowFile, producerService);
            }
        } finally {
            RecordSummary recordSummary = null;
            Exception completeTransactionException = null;
            try {
                recordSummary = producerService.complete();
            } catch (final Exception e) {
                completeTransactionException = e;
                getLogger().warn("Failed to complete transaction with Kafka", e);
                producerService.close();
            }

            if (recordSummary == null || recordSummary.isFailure()) {
                final FailureStrategy strategy = context.getProperty(FAILURE_STRATEGY).asAllowableValue(FailureStrategy.class);
                final String action = (FailureStrategy.ROLLBACK == strategy) ? "roll back" : "route to failure";

                if (recordSummary == null) {
                    // Transaction completion itself failed; no per-FlowFile detail available.
                    getLogger().error("Failed to publish {} FlowFiles to Kafka; will {}", flowFiles.size(), action, completeTransactionException);
                } else {
                    for (final FlowFileResult failureResult : recordSummary.getFlowFileResults()) {
                        final FlowFile flowFile = failureResult.getFlowFile();
                        final List<Exception> failureReason = failureResult.getExceptions();
                        final Exception cause = (failureReason == null || failureReason.isEmpty()) ? completeTransactionException : failureReason.getFirst();
                        getLogger().error("Failed to publish {}; will {}", flowFile, action, cause);
                    }
                }

                routeFailureStrategy(context, session, flowFiles);
            } else {
                routeResults(session, recordSummary.getFlowFileResults());
            }
        }
    }

    /** Applies the configured failure strategy: rollback + yield, or transfer to the failure relationship. */
    private void routeFailureStrategy(final ProcessContext context, final ProcessSession session, final List<FlowFile> flowFiles) {
        final FailureStrategy strategy = context.getProperty(FAILURE_STRATEGY).asAllowableValue(FailureStrategy.class);
        if (FailureStrategy.ROLLBACK == strategy) {
            session.rollback();
            context.yield();
        } else {
            session.transfer(flowFiles, REL_FAILURE);
        }
    }

    /** Routes each FlowFile by its publish result, updating counters and emitting SEND provenance per topic. */
    private void routeResults(final ProcessSession session, final List<FlowFileResult> flowFileResults) {
        for (final FlowFileResult flowFileResult : flowFileResults) {
            final long msgCount = flowFileResult.getSentCount();
            final FlowFile flowFile = session.putAttribute(flowFileResult.getFlowFile(), MSG_COUNT, String.valueOf(msgCount));
            session.adjustCounter("Messages Sent", msgCount, true);
            final Relationship relationship = flowFileResult.getExceptions().isEmpty() ? REL_SUCCESS : REL_FAILURE;

            for (final Map.Entry<String, Long> entry : flowFileResult.getSentPerTopic().entrySet()) {
                session.adjustCounter("Messages Sent to " + entry.getKey(), entry.getValue(), true);

                if (relationship == REL_SUCCESS) {
                    final String topicUri = brokerUri + "/" + entry.getKey();
                    final String eventDetails = String.format("Sent %d of %d records", entry.getValue(), msgCount);
                    session.getProvenanceReporter().send(flowFile, topicUri, eventDetails);
                }
            }

            session.transfer(flowFile, relationship);
        }
    }

    /** Streams one FlowFile's content through the appropriate converter into the producer. */
    private void publishFlowFile(final ProcessContext context, final ProcessSession session, final FlowFile flowFile, final KafkaProducerService producerService) {
        final String topic = context.getProperty(TOPIC_NAME).evaluateAttributeExpressions(flowFile.getAttributes()).getValue();
        final PublishContext publishContext = new PublishContext(topic, partitioner, null, flowFile);
        final KafkaRecordConverter kafkaRecordConverter = getKafkaRecordConverter(context, flowFile);
        final PublishCallback callback = new PublishCallback(producerService, publishContext, kafkaRecordConverter, flowFile.getAttributes(), flowFile.getSize());
        session.read(flowFile, callback);
    }

    /**
     * Selects the converter for the FlowFile content: record-based (wrapper or value strategy) when
     * reader and writer are configured, demarcated otherwise, or whole-content as the fallback.
     */
    private KafkaRecordConverter getKafkaRecordConverter(final ProcessContext context, final FlowFile flowFile) {
        final RecordReaderFactory readerFactory = context.getProperty(RECORD_READER).asControllerService(RecordReaderFactory.class);
        final RecordSetWriterFactory writerFactory = context.getProperty(RECORD_WRITER).asControllerService(RecordSetWriterFactory.class);
        final PublishStrategy publishStrategy = readerFactory != null ? context.getProperty(PUBLISH_STRATEGY).asAllowableValue(PublishStrategy.class) : null;

        final String attributeHeaderPatternProperty = context.getProperty(ATTRIBUTE_HEADER_PATTERN).getValue();
        final Pattern attributeHeaderPattern = attributeHeaderPatternProperty == null ? null : Pattern.compile(attributeHeaderPatternProperty);
        final Charset headerEncoding = attributeHeaderPattern == null ? null : Charset.forName(context.getProperty(HEADER_ENCODING).evaluateAttributeExpressions().getValue());
        final HeadersFactory headersFactory = new AttributesHeadersFactory(attributeHeaderPattern, headerEncoding);

        final int maxMessageSize = context.getProperty(MAX_REQUEST_SIZE).asDataSize(DataUnit.B).intValue();

        final RecordSetWriterFactory keyWriterFactory = context.getProperty(RECORD_KEY_WRITER).asControllerService(RecordSetWriterFactory.class);
        final PropertyValue kafkaKeyAttribute = context.getProperty(KAFKA_KEY);
        final String keyAttributeEncoding = context.getProperty(KEY_ATTRIBUTE_ENCODING).getValue();
        // Message Key Field only applies to the USE_VALUE publish strategy.
        final String messageKeyField = publishStrategy == PublishStrategy.USE_VALUE ? context.getProperty(MESSAGE_KEY_FIELD).evaluateAttributeExpressions(flowFile).getValue() : null;
        final KeyFactory keyFactory = messageKeyField != null
            ? new MessageKeyFactory(flowFile, messageKeyField, keyWriterFactory, getLogger())
            : new AttributeKeyFactory(flowFile, kafkaKeyAttribute, keyAttributeEncoding);

        if (readerFactory != null && writerFactory != null) {
            return switch (publishStrategy) {
                case USE_WRAPPER -> {
                    final RecordMetadataStrategy metadataStrategy = context.getProperty(RECORD_METADATA_STRATEGY).asAllowableValue(RecordMetadataStrategy.class);
                    yield new RecordWrapperStreamKafkaRecordConverter(flowFile, metadataStrategy, readerFactory, writerFactory, keyWriterFactory, maxMessageSize, getLogger());
                }
                case USE_VALUE -> new RecordStreamKafkaRecordConverter(readerFactory, writerFactory, headersFactory, keyFactory, maxMessageSize, getLogger());
            };
        }

        final String demarcator = context.getProperty(MESSAGE_DEMARCATOR).evaluateAttributeExpressions(flowFile).getValue();
        if (demarcator != null) {
            return new DelimitedStreamKafkaRecordConverter(demarcator.getBytes(StandardCharsets.UTF_8), maxMessageSize, headersFactory);
        }

        return new FlowFileStreamKafkaRecordConverter(maxMessageSize, headersFactory, keyFactory);
    }

    /**
     * Session read callback that converts the FlowFile's content stream into Kafka records and hands
     * them to the producer service. Conversion failures are reported to the producer via the
     * PublishContext rather than thrown.
     */
    private static class PublishCallback implements InputStreamCallback {
        private final KafkaProducerService producerService;
        private final PublishContext publishContext;
        private final KafkaRecordConverter kafkaConverter;
        private final Map<String, String> attributes;
        private final long inputLength;

        public PublishCallback(
                final KafkaProducerService producerService,
                final PublishContext publishContext,
                final KafkaRecordConverter kafkaConverter,
                final Map<String, String> attributes,
                final long inputLength) {
            this.producerService = producerService;
            this.publishContext = publishContext;
            this.kafkaConverter = kafkaConverter;
            this.attributes = attributes;
            this.inputLength = inputLength;
        }

        @Override
        public void process(final InputStream in) {
            try {
                final Iterator<KafkaRecord> records = kafkaConverter.convert(attributes, in, inputLength);
                producerService.send(records, publishContext);
            } catch (final Exception e) {
                publishContext.setException(e); // on data pre-process failure, indicate this to controller service
                producerService.send(Collections.emptyIterator(), publishContext);
            }
        }
    }

    /** Derives the partition from an Expression Language evaluation against the FlowFile. */
    private static class ExpressionLanguagePartitioner implements KafkaRecordPartitioner {
        private final PropertyValue partitionPropertyValue;

        public ExpressionLanguagePartitioner(final PropertyValue partitionPropertyValue) {
            this.partitionPropertyValue = partitionPropertyValue;
        }

        @Override
        public long partition(final String topic, final FlowFile flowFile) {
            final String partition = partitionPropertyValue.evaluateAttributeExpressions(flowFile).getValue();
            // NOTE(review): Objects.hashCode may be negative; assumes the downstream partition
            // resolution tolerates negative values — confirm against KafkaRecordPartitioner's contract.
            return Objects.hashCode(partition);
        }
    }

    /** Cycles through partitions per topic by incrementing a per-topic counter. */
    private static class RoundRobinPartitioner implements KafkaRecordPartitioner {
        private final ConcurrentMap<String, AtomicLong> partitionCounters = new ConcurrentHashMap<>();

        @Override
        public long partition(final String topic, final FlowFile flowFile) {
            final AtomicLong counter = partitionCounters.computeIfAbsent(topic, t -> new AtomicLong(0));
            return counter.getAndIncrement();
        }
    }
}
googleapis/google-cloud-java
35,028
java-monitoring/proto-google-cloud-monitoring-v3/src/main/java/com/google/monitoring/v3/ServiceMonitoringProto.java
/*
 * Copyright 2025 Google LLC
 *
 * Licensed under the Apache License, Version 2.0 (the "License");
 * you may not use this file except in compliance with the License.
 * You may obtain a copy of the License at
 *
 *     https://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */
// Generated by the protocol buffer compiler.  DO NOT EDIT!
// source: google/monitoring/v3/service.proto

// Protobuf Java Version: 3.25.8
package com.google.monitoring.v3;

public final class ServiceMonitoringProto {
  // Utility holder for generated descriptors; never instantiated.
  private ServiceMonitoringProto() {}

  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistryLite registry) {}

  public static void registerAllExtensions(com.google.protobuf.ExtensionRegistry registry) {
    registerAllExtensions((com.google.protobuf.ExtensionRegistryLite) registry);
  }

  // Descriptor and field-accessor-table handles for every message type declared in
  // service.proto; they are assigned by the static initializer later in this file.
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_Custom_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_Custom_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_AppEngine_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_AppEngine_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_CloudEndpoints_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_CloudEndpoints_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_ClusterIstio_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_ClusterIstio_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_MeshIstio_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_MeshIstio_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_IstioCanonicalService_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_IstioCanonicalService_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_CloudRun_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_CloudRun_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_GkeNamespace_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_GkeNamespace_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_GkeWorkload_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_GkeWorkload_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_GkeService_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_GkeService_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_BasicService_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_BasicService_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_BasicService_ServiceLabelsEntry_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_BasicService_ServiceLabelsEntry_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_Telemetry_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_Telemetry_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Service_UserLabelsEntry_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Service_UserLabelsEntry_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_ServiceLevelObjective_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_ServiceLevelObjective_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_ServiceLevelObjective_UserLabelsEntry_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_ServiceLevelObjective_UserLabelsEntry_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_ServiceLevelIndicator_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_ServiceLevelIndicator_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_BasicSli_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_BasicSli_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_BasicSli_AvailabilityCriteria_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_BasicSli_AvailabilityCriteria_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_BasicSli_LatencyCriteria_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_BasicSli_LatencyCriteria_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_Range_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_Range_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_RequestBasedSli_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_RequestBasedSli_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_TimeSeriesRatio_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
      internal_static_google_monitoring_v3_TimeSeriesRatio_fieldAccessorTable;
  static final com.google.protobuf.Descriptors.Descriptor
      internal_static_google_monitoring_v3_DistributionCut_descriptor;
  static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable
internal_static_google_monitoring_v3_DistributionCut_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_WindowsBasedSli_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_WindowsBasedSli_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_WindowsBasedSli_PerformanceThreshold_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_WindowsBasedSli_PerformanceThreshold_fieldAccessorTable; static final com.google.protobuf.Descriptors.Descriptor internal_static_google_monitoring_v3_WindowsBasedSli_MetricRange_descriptor; static final com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internal_static_google_monitoring_v3_WindowsBasedSli_MetricRange_fieldAccessorTable; public static com.google.protobuf.Descriptors.FileDescriptor getDescriptor() { return descriptor; } private static com.google.protobuf.Descriptors.FileDescriptor descriptor; static { java.lang.String[] descriptorData = { "\n" + "\"google/monitoring/v3/service.proto\022\024go" + "ogle.monitoring.v3\032\037google/api/field_beh" + "avior.proto\032\031google/api/resource.proto\032\036" + "google/protobuf/duration.proto\032!google/type/calendar_period.proto\"\224\021\n" + "\007Service\022\021\n" + "\004name\030\001 \001(\tB\003\340A\010\022\024\n" + "\014display_name\030\002 \001(\t\0226\n" + "\006custom\030\006 \001(\0132$.google.monitoring.v3.Service.CustomH\000\022=\n\n" + "app_engine\030\007 \001(\0132\'.google.monitoring.v3.Service.AppEngineH\000\022G\n" + "\017cloud_endpoints\030\010" + " \001(\0132,.google.monitoring.v3.Service.CloudEndpointsH\000\022C\n\r" + "cluster_istio\030\t" + " \001(\0132*.google.monitoring.v3.Service.ClusterIstioH\000\022=\n\n" + "mesh_istio\030\n" + " \001(\0132\'.google.monitoring.v3.Service.MeshIstioH\000\022V\n" + 
"\027istio_canonical_service\030\013 \001(\01323.go" + "ogle.monitoring.v3.Service.IstioCanonicalServiceH\000\022;\n" + "\tcloud_run\030\014 \001(\0132&.google.monitoring.v3.Service.CloudRunH\000\022C\n\r" + "gke_namespace\030\017" + " \001(\0132*.google.monitoring.v3.Service.GkeNamespaceH\000\022A\n" + "\014gke_workload\030\020 \001" + "(\0132).google.monitoring.v3.Service.GkeWorkloadH\000\022?\n" + "\013gke_service\030\021" + " \001(\0132(.google.monitoring.v3.Service.GkeServiceH\000\022A\n\r" + "basic_service\030\023 \001(\0132*.google.monitoring.v3.Service.BasicService\022:\n" + "\ttelemetry\030\r" + " \001(\0132\'.google.monitoring.v3.Service.Telemetry\022B\n" + "\013user_labels\030\016" + " \003(\0132-.google.monitoring.v3.Service.UserLabelsEntry\032\010\n" + "\006Custom\032\036\n" + "\tAppEngine\022\021\n" + "\tmodule_id\030\001 \001(\t\032!\n" + "\016CloudEndpoints\022\017\n" + "\007service\030\001 \001(\t\032g\n" + "\014ClusterIstio\022\020\n" + "\010location\030\001 \001(\t\022\024\n" + "\014cluster_name\030\002 \001(\t\022\031\n" + "\021service_namespace\030\003 \001(\t\022\024\n" + "\014service_name\030\004 \001(\t\032N\n" + "\tMeshIstio\022\020\n" + "\010mesh_uid\030\001 \001(\t\022\031\n" + "\021service_namespace\030\003 \001(\t\022\024\n" + "\014service_name\030\004 \001(\t\032i\n" + "\025IstioCanonicalService\022\020\n" + "\010mesh_uid\030\001 \001(\t\022#\n" + "\033canonical_service_namespace\030\003 \001(\t\022\031\n" + "\021canonical_service\030\004 \001(\t\0322\n" + "\010CloudRun\022\024\n" + "\014service_name\030\001 \001(\t\022\020\n" + "\010location\030\002 \001(\t\032g\n" + "\014GkeNamespace\022\027\n\n" + "project_id\030\001 \001(\tB\003\340A\003\022\020\n" + "\010location\030\002 \001(\t\022\024\n" + "\014cluster_name\030\003 \001(\t\022\026\n" + "\016namespace_name\030\004 \001(\t\032\254\001\n" + "\013GkeWorkload\022\027\n\n" + "project_id\030\001 \001(\tB\003\340A\003\022\020\n" + "\010location\030\002 \001(\t\022\024\n" + 
"\014cluster_name\030\003 \001(\t\022\026\n" + "\016namespace_name\030\004 \001(\t\022!\n" + "\031top_level_controller_type\030\005 \001(\t\022!\n" + "\031top_level_controller_name\030\006 \001(\t\032{\n\n" + "GkeService\022\027\n\n" + "project_id\030\001 \001(\tB\003\340A\003\022\020\n" + "\010location\030\002 \001(\t\022\024\n" + "\014cluster_name\030\003 \001(\t\022\026\n" + "\016namespace_name\030\004 \001(\t\022\024\n" + "\014service_name\030\005 \001(\t\032\261\001\n" + "\014BasicService\022\024\n" + "\014service_type\030\001 \001(\t\022U\n" + "\016service_labels\030\002 \003(\0132=.g" + "oogle.monitoring.v3.Service.BasicService.ServiceLabelsEntry\0324\n" + "\022ServiceLabelsEntry\022\013\n" + "\003key\030\001 \001(\t\022\r\n" + "\005value\030\002 \001(\t:\0028\001\032\"\n" + "\tTelemetry\022\025\n\r" + "resource_name\030\001 \001(\t\0321\n" + "\017UserLabelsEntry\022\013\n" + "\003key\030\001 \001(\t\022\r\n" + "\005value\030\002 \001(\t:\0028\001:\247\001\352A\243\001\n" + "!monitoring.googleapis.com/Service\022%projects/{project}/services/{service" + "}\022/organizations/{organization}/services" + "/{service}\022#folders/{folder}/services/{service}\022\001*B\014\n\n" + "identifier\"\233\006\n" + "\025ServiceLevelObjective\022\021\n" + "\004name\030\001 \001(\tB\003\340A\010\022\024\n" + "\014display_name\030\013 \001(\t\022L\n" + "\027service_level_indicator\030\003" + " \001(\0132+.google.monitoring.v3.ServiceLevelIndicator\022\014\n" + "\004goal\030\004 \001(\001\0223\n" + "\016rolling_period\030\005 \001(\0132\031.google.protobuf.DurationH\000\0226\n" + "\017calendar_period\030\006 \001(\0162\033.google.type.CalendarPeriodH\000\022P\n" + "\013user_labels\030\014 \003(\0132;.goog" + "le.monitoring.v3.ServiceLevelObjective.UserLabelsEntry\0321\n" + "\017UserLabelsEntry\022\013\n" + "\003key\030\001 \001(\t\022\r\n" + "\005value\030\002 \001(\t:\0028\001\"4\n" + "\004View\022\024\n" + "\020VIEW_UNSPECIFIED\020\000\022\010\n" + "\004FULL\020\002\022\014\n" 
+ "\010EXPLICIT\020\001:\312\002\352A\306\002\n" + "/monitoring.googleapis.com/ServiceLevelObjective\022Vprojects/{project}/serv" + "ices/{service}/serviceLevelObjectives/{service_level_objective}\022`organizations/{" + "organization}/services/{service}/serviceLevelObjectives/{service_level_objective" + "}\022Tfolders/{folder}/services/{service}/s" + "erviceLevelObjectives/{service_level_objective}\022\001* \001B\010\n" + "\006period\"\324\001\n" + "\025ServiceLevelIndicator\0223\n" + "\tbasic_sli\030\004 \001(\0132\036.google.monitoring.v3.BasicSliH\000\022>\n\r" + "request_based\030\001 \001(\0132%.google.monitoring.v3.RequestBasedSliH\000\022>\n\r" + "windows_based\030\002 \001(\0132%.google.monitoring.v3.WindowsBasedSliH\000B\006\n" + "\004type\"\266\002\n" + "\010BasicSli\022\016\n" + "\006method\030\007 \003(\t\022\020\n" + "\010location\030\010 \003(\t\022\017\n" + "\007version\030\t \003(\t\022K\n" + "\014availability\030\002 " + "\001(\01323.google.monitoring.v3.BasicSli.AvailabilityCriteriaH\000\022A\n" + "\007latency\030\003 \001(\0132..go" + "ogle.monitoring.v3.BasicSli.LatencyCriteriaH\000\032\026\n" + "\024AvailabilityCriteria\032?\n" + "\017LatencyCriteria\022,\n" + "\tthreshold\030\003 \001(\0132\031.google.protobuf.DurationB\016\n" + "\014sli_criteria\"!\n" + "\005Range\022\013\n" + "\003min\030\001 \001(\001\022\013\n" + "\003max\030\002 \001(\001\"\241\001\n" + "\017RequestBasedSli\022A\n" + "\020good_total_ratio\030\001" + " \001(\0132%.google.monitoring.v3.TimeSeriesRatioH\000\022A\n" + "\020distribution_cut\030\003" + " \001(\0132%.google.monitoring.v3.DistributionCutH\000B\010\n" + "\006method\"h\n" + "\017TimeSeriesRatio\022\033\n" + "\023good_service_filter\030\004 \001(\t\022\032\n" + "\022bad_service_filter\030\005 \001(\t\022\034\n" + "\024total_service_filter\030\006 \001(\t\"Z\n" + "\017DistributionCut\022\033\n" + "\023distribution_filter\030\004 \001(\t\022*\n" + "\005range\030\005 
\001(\0132\033.google.monitoring.v3.Range\"\203\005\n" + "\017WindowsBasedSli\022 \n" + "\026good_bad_metric_filter\030\005 \001(\tH\000\022`\n" + "\032good_total_ratio_threshold\030\002 \001(\0132:." + "google.monitoring.v3.WindowsBasedSli.PerformanceThresholdH\000\022Q\n" + "\024metric_mean_in_range\030\006" + " \001(\01321.google.monitoring.v3.WindowsBasedSli.MetricRangeH\000\022P\n" + "\023metric_sum_in_range\030\007" + " \001(\01321.google.monitoring.v3.WindowsBasedSli.MetricRangeH\000\0220\n\r" + "window_period\030\004 \001(\0132\031.google.protobuf.Duration\032\260\001\n" + "\024PerformanceThreshold\022<\n" + "\013performance\030\001 \001(\0132%.google.monitoring.v3.RequestBasedSliH\000\022?\n" + "\025basic_sli_performance\030\003" + " \001(\0132\036.google.monitoring.v3.BasicSliH\000\022\021\n" + "\tthreshold\030\002 \001(\001B\006\n" + "\004type\032N\n" + "\013MetricRange\022\023\n" + "\013time_series\030\001 \001(\t\022*\n" + "\005range\030\004 \001(\0132\033.google.monitoring.v3.RangeB\022\n" + "\020window_criterionB\321\001\n" + "\030com.google.monitoring.v3B\026ServiceMonitorin" + "gProtoP\001ZAcloud.google.com/go/monitoring" + "/apiv3/v2/monitoringpb;monitoringpb\252\002\032Go" + "ogle.Cloud.Monitoring.V3\312\002\032Google\\Cloud\\" + "Monitoring\\V3\352\002\035Google::Cloud::Monitoring::V3b\006proto3" }; descriptor = com.google.protobuf.Descriptors.FileDescriptor.internalBuildGeneratedFileFrom( descriptorData, new com.google.protobuf.Descriptors.FileDescriptor[] { com.google.api.FieldBehaviorProto.getDescriptor(), com.google.api.ResourceProto.getDescriptor(), com.google.protobuf.DurationProto.getDescriptor(), com.google.type.CalendarPeriodProto.getDescriptor(), }); internal_static_google_monitoring_v3_Service_descriptor = getDescriptor().getMessageTypes().get(0); internal_static_google_monitoring_v3_Service_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_descriptor, new 
java.lang.String[] { "Name", "DisplayName", "Custom", "AppEngine", "CloudEndpoints", "ClusterIstio", "MeshIstio", "IstioCanonicalService", "CloudRun", "GkeNamespace", "GkeWorkload", "GkeService", "BasicService", "Telemetry", "UserLabels", "Identifier", }); internal_static_google_monitoring_v3_Service_Custom_descriptor = internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(0); internal_static_google_monitoring_v3_Service_Custom_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_Custom_descriptor, new java.lang.String[] {}); internal_static_google_monitoring_v3_Service_AppEngine_descriptor = internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(1); internal_static_google_monitoring_v3_Service_AppEngine_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_AppEngine_descriptor, new java.lang.String[] { "ModuleId", }); internal_static_google_monitoring_v3_Service_CloudEndpoints_descriptor = internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(2); internal_static_google_monitoring_v3_Service_CloudEndpoints_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_CloudEndpoints_descriptor, new java.lang.String[] { "Service", }); internal_static_google_monitoring_v3_Service_ClusterIstio_descriptor = internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(3); internal_static_google_monitoring_v3_Service_ClusterIstio_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_ClusterIstio_descriptor, new java.lang.String[] { "Location", "ClusterName", "ServiceNamespace", "ServiceName", }); internal_static_google_monitoring_v3_Service_MeshIstio_descriptor = 
internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(4); internal_static_google_monitoring_v3_Service_MeshIstio_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_MeshIstio_descriptor, new java.lang.String[] { "MeshUid", "ServiceNamespace", "ServiceName", }); internal_static_google_monitoring_v3_Service_IstioCanonicalService_descriptor = internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(5); internal_static_google_monitoring_v3_Service_IstioCanonicalService_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_IstioCanonicalService_descriptor, new java.lang.String[] { "MeshUid", "CanonicalServiceNamespace", "CanonicalService", }); internal_static_google_monitoring_v3_Service_CloudRun_descriptor = internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(6); internal_static_google_monitoring_v3_Service_CloudRun_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_CloudRun_descriptor, new java.lang.String[] { "ServiceName", "Location", }); internal_static_google_monitoring_v3_Service_GkeNamespace_descriptor = internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(7); internal_static_google_monitoring_v3_Service_GkeNamespace_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_GkeNamespace_descriptor, new java.lang.String[] { "ProjectId", "Location", "ClusterName", "NamespaceName", }); internal_static_google_monitoring_v3_Service_GkeWorkload_descriptor = internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(8); internal_static_google_monitoring_v3_Service_GkeWorkload_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( 
internal_static_google_monitoring_v3_Service_GkeWorkload_descriptor, new java.lang.String[] { "ProjectId", "Location", "ClusterName", "NamespaceName", "TopLevelControllerType", "TopLevelControllerName", }); internal_static_google_monitoring_v3_Service_GkeService_descriptor = internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(9); internal_static_google_monitoring_v3_Service_GkeService_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_GkeService_descriptor, new java.lang.String[] { "ProjectId", "Location", "ClusterName", "NamespaceName", "ServiceName", }); internal_static_google_monitoring_v3_Service_BasicService_descriptor = internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(10); internal_static_google_monitoring_v3_Service_BasicService_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_BasicService_descriptor, new java.lang.String[] { "ServiceType", "ServiceLabels", }); internal_static_google_monitoring_v3_Service_BasicService_ServiceLabelsEntry_descriptor = internal_static_google_monitoring_v3_Service_BasicService_descriptor .getNestedTypes() .get(0); internal_static_google_monitoring_v3_Service_BasicService_ServiceLabelsEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_BasicService_ServiceLabelsEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_monitoring_v3_Service_Telemetry_descriptor = internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(11); internal_static_google_monitoring_v3_Service_Telemetry_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_Telemetry_descriptor, new java.lang.String[] { "ResourceName", }); 
internal_static_google_monitoring_v3_Service_UserLabelsEntry_descriptor = internal_static_google_monitoring_v3_Service_descriptor.getNestedTypes().get(12); internal_static_google_monitoring_v3_Service_UserLabelsEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Service_UserLabelsEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_monitoring_v3_ServiceLevelObjective_descriptor = getDescriptor().getMessageTypes().get(1); internal_static_google_monitoring_v3_ServiceLevelObjective_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_ServiceLevelObjective_descriptor, new java.lang.String[] { "Name", "DisplayName", "ServiceLevelIndicator", "Goal", "RollingPeriod", "CalendarPeriod", "UserLabels", "Period", }); internal_static_google_monitoring_v3_ServiceLevelObjective_UserLabelsEntry_descriptor = internal_static_google_monitoring_v3_ServiceLevelObjective_descriptor .getNestedTypes() .get(0); internal_static_google_monitoring_v3_ServiceLevelObjective_UserLabelsEntry_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_ServiceLevelObjective_UserLabelsEntry_descriptor, new java.lang.String[] { "Key", "Value", }); internal_static_google_monitoring_v3_ServiceLevelIndicator_descriptor = getDescriptor().getMessageTypes().get(2); internal_static_google_monitoring_v3_ServiceLevelIndicator_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_ServiceLevelIndicator_descriptor, new java.lang.String[] { "BasicSli", "RequestBased", "WindowsBased", "Type", }); internal_static_google_monitoring_v3_BasicSli_descriptor = getDescriptor().getMessageTypes().get(3); internal_static_google_monitoring_v3_BasicSli_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_BasicSli_descriptor, new java.lang.String[] { "Method", "Location", "Version", "Availability", "Latency", "SliCriteria", }); internal_static_google_monitoring_v3_BasicSli_AvailabilityCriteria_descriptor = internal_static_google_monitoring_v3_BasicSli_descriptor.getNestedTypes().get(0); internal_static_google_monitoring_v3_BasicSli_AvailabilityCriteria_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_BasicSli_AvailabilityCriteria_descriptor, new java.lang.String[] {}); internal_static_google_monitoring_v3_BasicSli_LatencyCriteria_descriptor = internal_static_google_monitoring_v3_BasicSli_descriptor.getNestedTypes().get(1); internal_static_google_monitoring_v3_BasicSli_LatencyCriteria_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_BasicSli_LatencyCriteria_descriptor, new java.lang.String[] { "Threshold", }); internal_static_google_monitoring_v3_Range_descriptor = getDescriptor().getMessageTypes().get(4); internal_static_google_monitoring_v3_Range_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_Range_descriptor, new java.lang.String[] { "Min", "Max", }); internal_static_google_monitoring_v3_RequestBasedSli_descriptor = getDescriptor().getMessageTypes().get(5); internal_static_google_monitoring_v3_RequestBasedSli_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_RequestBasedSli_descriptor, new java.lang.String[] { "GoodTotalRatio", "DistributionCut", "Method", }); internal_static_google_monitoring_v3_TimeSeriesRatio_descriptor = getDescriptor().getMessageTypes().get(6); internal_static_google_monitoring_v3_TimeSeriesRatio_fieldAccessorTable = new 
com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_TimeSeriesRatio_descriptor, new java.lang.String[] { "GoodServiceFilter", "BadServiceFilter", "TotalServiceFilter", }); internal_static_google_monitoring_v3_DistributionCut_descriptor = getDescriptor().getMessageTypes().get(7); internal_static_google_monitoring_v3_DistributionCut_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_DistributionCut_descriptor, new java.lang.String[] { "DistributionFilter", "Range", }); internal_static_google_monitoring_v3_WindowsBasedSli_descriptor = getDescriptor().getMessageTypes().get(8); internal_static_google_monitoring_v3_WindowsBasedSli_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_WindowsBasedSli_descriptor, new java.lang.String[] { "GoodBadMetricFilter", "GoodTotalRatioThreshold", "MetricMeanInRange", "MetricSumInRange", "WindowPeriod", "WindowCriterion", }); internal_static_google_monitoring_v3_WindowsBasedSli_PerformanceThreshold_descriptor = internal_static_google_monitoring_v3_WindowsBasedSli_descriptor.getNestedTypes().get(0); internal_static_google_monitoring_v3_WindowsBasedSli_PerformanceThreshold_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_WindowsBasedSli_PerformanceThreshold_descriptor, new java.lang.String[] { "Performance", "BasicSliPerformance", "Threshold", "Type", }); internal_static_google_monitoring_v3_WindowsBasedSli_MetricRange_descriptor = internal_static_google_monitoring_v3_WindowsBasedSli_descriptor.getNestedTypes().get(1); internal_static_google_monitoring_v3_WindowsBasedSli_MetricRange_fieldAccessorTable = new com.google.protobuf.GeneratedMessageV3.FieldAccessorTable( internal_static_google_monitoring_v3_WindowsBasedSli_MetricRange_descriptor, new java.lang.String[] { "TimeSeries", "Range", 
}); com.google.protobuf.ExtensionRegistry registry = com.google.protobuf.ExtensionRegistry.newInstance(); registry.add(com.google.api.FieldBehaviorProto.fieldBehavior); registry.add(com.google.api.ResourceProto.resource); com.google.protobuf.Descriptors.FileDescriptor.internalUpdateFileDescriptor( descriptor, registry); com.google.api.FieldBehaviorProto.getDescriptor(); com.google.api.ResourceProto.getDescriptor(); com.google.protobuf.DurationProto.getDescriptor(); com.google.type.CalendarPeriodProto.getDescriptor(); } // @@protoc_insertion_point(outer_class_scope) }
googleapis/google-cloud-java
35,942
java-orchestration-airflow/proto-google-cloud-orchestration-airflow-v1beta1/src/main/java/com/google/cloud/orchestration/airflow/service/v1beta1/CreateUserWorkloadsSecretRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/orchestration/airflow/service/v1beta1/environments.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.orchestration.airflow.service.v1beta1; /** * * * <pre> * Create user workloads Secret request. * </pre> * * Protobuf type {@code * google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest} */ public final class CreateUserWorkloadsSecretRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest) CreateUserWorkloadsSecretRequestOrBuilder { private static final long serialVersionUID = 0L; // Use CreateUserWorkloadsSecretRequest.newBuilder() to construct. 
private CreateUserWorkloadsSecretRequest( com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private CreateUserWorkloadsSecretRequest() { parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new CreateUserWorkloadsSecretRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1beta1_CreateUserWorkloadsSecretRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1beta1_CreateUserWorkloadsSecretRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest .class, com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest .Builder.class); } private int bitField0_; public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. 
*/ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int USER_WORKLOADS_SECRET_FIELD_NUMBER = 2; private com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret userWorkloadsSecret_; /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the userWorkloadsSecret field is set. */ @java.lang.Override public boolean hasUserWorkloadsSecret() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The userWorkloadsSecret. */ @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret getUserWorkloadsSecret() { return userWorkloadsSecret_ == null ? 
com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret .getDefaultInstance() : userWorkloadsSecret_; } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecretOrBuilder getUserWorkloadsSecretOrBuilder() { return userWorkloadsSecret_ == null ? com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret .getDefaultInstance() : userWorkloadsSecret_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getUserWorkloadsSecret()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUserWorkloadsSecret()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest)) { return super.equals(obj); } com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest other = (com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest) obj; if (!getParent().equals(other.getParent())) return false; if (hasUserWorkloadsSecret() != other.hasUserWorkloadsSecret()) return false; if (hasUserWorkloadsSecret()) { if (!getUserWorkloadsSecret().equals(other.getUserWorkloadsSecret())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (hasUserWorkloadsSecret()) { hash = (37 * hash) + USER_WORKLOADS_SECRET_FIELD_NUMBER; hash = (53 * hash) + getUserWorkloadsSecret().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static 
com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest parseFrom(byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest parseFrom(com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Create user workloads Secret request. 
* </pre> * * Protobuf type {@code * google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest) com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1beta1_CreateUserWorkloadsSecretRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1beta1_CreateUserWorkloadsSecretRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest.class, com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest.Builder.class); } // Construct using // com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getUserWorkloadsSecretFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; userWorkloadsSecret_ = null; if (userWorkloadsSecretBuilder_ != null) { userWorkloadsSecretBuilder_.dispose(); 
userWorkloadsSecretBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.orchestration.airflow.service.v1beta1.EnvironmentsOuterClass .internal_static_google_cloud_orchestration_airflow_service_v1beta1_CreateUserWorkloadsSecretRequest_descriptor; } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest getDefaultInstanceForType() { return com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest build() { com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest buildPartial() { com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest result = new com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.userWorkloadsSecret_ = userWorkloadsSecretBuilder_ == null ? 
userWorkloadsSecret_ : userWorkloadsSecretBuilder_.build(); to_bitField0_ |= 0x00000001; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest) { return mergeFrom( (com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest other) { if (other == com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasUserWorkloadsSecret()) { mergeUserWorkloadsSecret(other.getUserWorkloadsSecret()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } 
@java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage( getUserWorkloadsSecretFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. 
*/ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The environment name to create a Secret for, in the form: * "projects/{projectId}/locations/{locationId}/environments/{environmentId}" * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. 
*/ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret userWorkloadsSecret_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret, com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret.Builder, com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecretOrBuilder> userWorkloadsSecretBuilder_; /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the userWorkloadsSecret field is set. */ public boolean hasUserWorkloadsSecret() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The userWorkloadsSecret. */ public com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret getUserWorkloadsSecret() { if (userWorkloadsSecretBuilder_ == null) { return userWorkloadsSecret_ == null ? com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret .getDefaultInstance() : userWorkloadsSecret_; } else { return userWorkloadsSecretBuilder_.getMessage(); } } /** * * * <pre> * Required. User workloads Secret to create. 
* </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUserWorkloadsSecret( com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret value) { if (userWorkloadsSecretBuilder_ == null) { if (value == null) { throw new NullPointerException(); } userWorkloadsSecret_ = value; } else { userWorkloadsSecretBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUserWorkloadsSecret( com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret.Builder builderForValue) { if (userWorkloadsSecretBuilder_ == null) { userWorkloadsSecret_ = builderForValue.build(); } else { userWorkloadsSecretBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. User workloads Secret to create. 
* </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUserWorkloadsSecret( com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret value) { if (userWorkloadsSecretBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && userWorkloadsSecret_ != null && userWorkloadsSecret_ != com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret .getDefaultInstance()) { getUserWorkloadsSecretBuilder().mergeFrom(value); } else { userWorkloadsSecret_ = value; } } else { userWorkloadsSecretBuilder_.mergeFrom(value); } if (userWorkloadsSecret_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUserWorkloadsSecret() { bitField0_ = (bitField0_ & ~0x00000002); userWorkloadsSecret_ = null; if (userWorkloadsSecretBuilder_ != null) { userWorkloadsSecretBuilder_.dispose(); userWorkloadsSecretBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret.Builder getUserWorkloadsSecretBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUserWorkloadsSecretFieldBuilder().getBuilder(); } /** * * * <pre> * Required. User workloads Secret to create. 
* </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecretOrBuilder getUserWorkloadsSecretOrBuilder() { if (userWorkloadsSecretBuilder_ != null) { return userWorkloadsSecretBuilder_.getMessageOrBuilder(); } else { return userWorkloadsSecret_ == null ? com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret .getDefaultInstance() : userWorkloadsSecret_; } } /** * * * <pre> * Required. User workloads Secret to create. * </pre> * * <code> * .google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret user_workloads_secret = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret, com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret.Builder, com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecretOrBuilder> getUserWorkloadsSecretFieldBuilder() { if (userWorkloadsSecretBuilder_ == null) { userWorkloadsSecretBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret, com.google.cloud.orchestration.airflow.service.v1beta1.UserWorkloadsSecret.Builder, com.google.cloud.orchestration.airflow.service.v1beta1 .UserWorkloadsSecretOrBuilder>( getUserWorkloadsSecret(), getParentForChildren(), isClean()); userWorkloadsSecret_ = null; } return userWorkloadsSecretBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // 
@@protoc_insertion_point(builder_scope:google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest) } // @@protoc_insertion_point(class_scope:google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest) private static final com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest(); } public static com.google.cloud.orchestration.airflow.service.v1beta1 .CreateUserWorkloadsSecretRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<CreateUserWorkloadsSecretRequest> PARSER = new com.google.protobuf.AbstractParser<CreateUserWorkloadsSecretRequest>() { @java.lang.Override public CreateUserWorkloadsSecretRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<CreateUserWorkloadsSecretRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<CreateUserWorkloadsSecretRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.orchestration.airflow.service.v1beta1.CreateUserWorkloadsSecretRequest getDefaultInstanceForType() { return 
DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,922
java-apihub/proto-google-cloud-apihub-v1/src/main/java/com/google/cloud/apihub/v1/ListCurationsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/apihub/v1/curate_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.apihub.v1; /** * * * <pre> * The [ListCurations][ApiHub.ListCurations] method's response. * </pre> * * Protobuf type {@code google.cloud.apihub.v1.ListCurationsResponse} */ public final class ListCurationsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.apihub.v1.ListCurationsResponse) ListCurationsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListCurationsResponse.newBuilder() to construct. 
private ListCurationsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListCurationsResponse() { curations_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListCurationsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apihub.v1.CurateServiceProto .internal_static_google_cloud_apihub_v1_ListCurationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apihub.v1.CurateServiceProto .internal_static_google_cloud_apihub_v1_ListCurationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apihub.v1.ListCurationsResponse.class, com.google.cloud.apihub.v1.ListCurationsResponse.Builder.class); } public static final int CURATIONS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.apihub.v1.Curation> curations_; /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.apihub.v1.Curation> getCurationsList() { return curations_; } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.apihub.v1.CurationOrBuilder> getCurationsOrBuilderList() { return curations_; } /** * * * <pre> * The curation resources present in the API hub. 
* </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ @java.lang.Override public int getCurationsCount() { return curations_.size(); } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ @java.lang.Override public com.google.cloud.apihub.v1.Curation getCurations(int index) { return curations_.get(index); } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ @java.lang.Override public com.google.cloud.apihub.v1.CurationOrBuilder getCurationsOrBuilder(int index) { return curations_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < curations_.size(); i++) { output.writeMessage(1, curations_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < curations_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, curations_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.apihub.v1.ListCurationsResponse)) { return super.equals(obj); } com.google.cloud.apihub.v1.ListCurationsResponse other = (com.google.cloud.apihub.v1.ListCurationsResponse) obj; if (!getCurationsList().equals(other.getCurationsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getCurationsCount() > 0) { hash = (37 * hash) + CURATIONS_FIELD_NUMBER; hash = (53 * hash) + getCurationsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.apihub.v1.ListCurationsResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.ListCurationsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apihub.v1.ListCurationsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.ListCurationsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apihub.v1.ListCurationsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.ListCurationsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.apihub.v1.ListCurationsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.ListCurationsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apihub.v1.ListCurationsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.ListCurationsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apihub.v1.ListCurationsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.ListCurationsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.apihub.v1.ListCurationsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The [ListCurations][ApiHub.ListCurations] method's response. * </pre> * * Protobuf type {@code google.cloud.apihub.v1.ListCurationsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.apihub.v1.ListCurationsResponse) com.google.cloud.apihub.v1.ListCurationsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apihub.v1.CurateServiceProto .internal_static_google_cloud_apihub_v1_ListCurationsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apihub.v1.CurateServiceProto .internal_static_google_cloud_apihub_v1_ListCurationsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apihub.v1.ListCurationsResponse.class, com.google.cloud.apihub.v1.ListCurationsResponse.Builder.class); } // Construct using com.google.cloud.apihub.v1.ListCurationsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (curationsBuilder_ == null) { curations_ = java.util.Collections.emptyList(); } else { curations_ = null; curationsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.apihub.v1.CurateServiceProto .internal_static_google_cloud_apihub_v1_ListCurationsResponse_descriptor; } 
@java.lang.Override public com.google.cloud.apihub.v1.ListCurationsResponse getDefaultInstanceForType() { return com.google.cloud.apihub.v1.ListCurationsResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.apihub.v1.ListCurationsResponse build() { com.google.cloud.apihub.v1.ListCurationsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.apihub.v1.ListCurationsResponse buildPartial() { com.google.cloud.apihub.v1.ListCurationsResponse result = new com.google.cloud.apihub.v1.ListCurationsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.apihub.v1.ListCurationsResponse result) { if (curationsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { curations_ = java.util.Collections.unmodifiableList(curations_); bitField0_ = (bitField0_ & ~0x00000001); } result.curations_ = curations_; } else { result.curations_ = curationsBuilder_.build(); } } private void buildPartial0(com.google.cloud.apihub.v1.ListCurationsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) 
{ return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.apihub.v1.ListCurationsResponse) { return mergeFrom((com.google.cloud.apihub.v1.ListCurationsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.apihub.v1.ListCurationsResponse other) { if (other == com.google.cloud.apihub.v1.ListCurationsResponse.getDefaultInstance()) return this; if (curationsBuilder_ == null) { if (!other.curations_.isEmpty()) { if (curations_.isEmpty()) { curations_ = other.curations_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureCurationsIsMutable(); curations_.addAll(other.curations_); } onChanged(); } } else { if (!other.curations_.isEmpty()) { if (curationsBuilder_.isEmpty()) { curationsBuilder_.dispose(); curationsBuilder_ = null; curations_ = other.curations_; bitField0_ = (bitField0_ & ~0x00000001); curationsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getCurationsFieldBuilder() : null; } else { curationsBuilder_.addAllMessages(other.curations_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.apihub.v1.Curation m = input.readMessage( com.google.cloud.apihub.v1.Curation.parser(), extensionRegistry); if (curationsBuilder_ == null) { ensureCurationsIsMutable(); curations_.add(m); } else { curationsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.apihub.v1.Curation> curations_ = java.util.Collections.emptyList(); private void ensureCurationsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { curations_ = new java.util.ArrayList<com.google.cloud.apihub.v1.Curation>(curations_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.apihub.v1.Curation, com.google.cloud.apihub.v1.Curation.Builder, com.google.cloud.apihub.v1.CurationOrBuilder> curationsBuilder_; /** * * * <pre> 
* The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public java.util.List<com.google.cloud.apihub.v1.Curation> getCurationsList() { if (curationsBuilder_ == null) { return java.util.Collections.unmodifiableList(curations_); } else { return curationsBuilder_.getMessageList(); } } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public int getCurationsCount() { if (curationsBuilder_ == null) { return curations_.size(); } else { return curationsBuilder_.getCount(); } } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public com.google.cloud.apihub.v1.Curation getCurations(int index) { if (curationsBuilder_ == null) { return curations_.get(index); } else { return curationsBuilder_.getMessage(index); } } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public Builder setCurations(int index, com.google.cloud.apihub.v1.Curation value) { if (curationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCurationsIsMutable(); curations_.set(index, value); onChanged(); } else { curationsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public Builder setCurations( int index, com.google.cloud.apihub.v1.Curation.Builder builderForValue) { if (curationsBuilder_ == null) { ensureCurationsIsMutable(); curations_.set(index, builderForValue.build()); onChanged(); } else { curationsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The curation resources present in the API hub. 
* </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public Builder addCurations(com.google.cloud.apihub.v1.Curation value) { if (curationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCurationsIsMutable(); curations_.add(value); onChanged(); } else { curationsBuilder_.addMessage(value); } return this; } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public Builder addCurations(int index, com.google.cloud.apihub.v1.Curation value) { if (curationsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureCurationsIsMutable(); curations_.add(index, value); onChanged(); } else { curationsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public Builder addCurations(com.google.cloud.apihub.v1.Curation.Builder builderForValue) { if (curationsBuilder_ == null) { ensureCurationsIsMutable(); curations_.add(builderForValue.build()); onChanged(); } else { curationsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public Builder addCurations( int index, com.google.cloud.apihub.v1.Curation.Builder builderForValue) { if (curationsBuilder_ == null) { ensureCurationsIsMutable(); curations_.add(index, builderForValue.build()); onChanged(); } else { curationsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public Builder addAllCurations( java.lang.Iterable<? 
extends com.google.cloud.apihub.v1.Curation> values) { if (curationsBuilder_ == null) { ensureCurationsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, curations_); onChanged(); } else { curationsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public Builder clearCurations() { if (curationsBuilder_ == null) { curations_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { curationsBuilder_.clear(); } return this; } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public Builder removeCurations(int index) { if (curationsBuilder_ == null) { ensureCurationsIsMutable(); curations_.remove(index); onChanged(); } else { curationsBuilder_.remove(index); } return this; } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public com.google.cloud.apihub.v1.Curation.Builder getCurationsBuilder(int index) { return getCurationsFieldBuilder().getBuilder(index); } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public com.google.cloud.apihub.v1.CurationOrBuilder getCurationsOrBuilder(int index) { if (curationsBuilder_ == null) { return curations_.get(index); } else { return curationsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public java.util.List<? 
extends com.google.cloud.apihub.v1.CurationOrBuilder> getCurationsOrBuilderList() { if (curationsBuilder_ != null) { return curationsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(curations_); } } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public com.google.cloud.apihub.v1.Curation.Builder addCurationsBuilder() { return getCurationsFieldBuilder() .addBuilder(com.google.cloud.apihub.v1.Curation.getDefaultInstance()); } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public com.google.cloud.apihub.v1.Curation.Builder addCurationsBuilder(int index) { return getCurationsFieldBuilder() .addBuilder(index, com.google.cloud.apihub.v1.Curation.getDefaultInstance()); } /** * * * <pre> * The curation resources present in the API hub. * </pre> * * <code>repeated .google.cloud.apihub.v1.Curation curations = 1;</code> */ public java.util.List<com.google.cloud.apihub.v1.Curation.Builder> getCurationsBuilderList() { return getCurationsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.apihub.v1.Curation, com.google.cloud.apihub.v1.Curation.Builder, com.google.cloud.apihub.v1.CurationOrBuilder> getCurationsFieldBuilder() { if (curationsBuilder_ == null) { curationsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.apihub.v1.Curation, com.google.cloud.apihub.v1.Curation.Builder, com.google.cloud.apihub.v1.CurationOrBuilder>( curations_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); curations_ = null; } return curationsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. 
* </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. * If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token, which can be sent as `page_token` to retrieve the next page. 
* If this field is omitted, there are no subsequent pages. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.apihub.v1.ListCurationsResponse) } // @@protoc_insertion_point(class_scope:google.cloud.apihub.v1.ListCurationsResponse) private static final com.google.cloud.apihub.v1.ListCurationsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.apihub.v1.ListCurationsResponse(); } public static com.google.cloud.apihub.v1.ListCurationsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListCurationsResponse> PARSER = new com.google.protobuf.AbstractParser<ListCurationsResponse>() { @java.lang.Override public ListCurationsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch 
(java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListCurationsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListCurationsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.apihub.v1.ListCurationsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,864
java-recommendations-ai/proto-google-cloud-recommendations-ai-v1beta1/src/main/java/com/google/cloud/recommendationengine/v1beta1/ListCatalogItemsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/recommendationengine/v1beta1/catalog_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.recommendationengine.v1beta1; /** * * * <pre> * Request message for ListCatalogItems method. * </pre> * * Protobuf type {@code google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest} */ public final class ListCatalogItemsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest) ListCatalogItemsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListCatalogItemsRequest.newBuilder() to construct. 
private ListCatalogItemsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListCatalogItemsRequest() { parent_ = ""; pageToken_ = ""; filter_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListCatalogItemsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.recommendationengine.v1beta1.CatalogServiceOuterClass .internal_static_google_cloud_recommendationengine_v1beta1_ListCatalogItemsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.recommendationengine.v1beta1.CatalogServiceOuterClass .internal_static_google_cloud_recommendationengine_v1beta1_ListCatalogItemsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest.class, com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent catalog resource name, such as * `projects/&#42;&#47;locations/global/catalogs/default_catalog`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. The parent catalog resource name, such as * `projects/&#42;&#47;locations/global/catalogs/default_catalog`. 
* </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PAGE_SIZE_FIELD_NUMBER = 2; private int pageSize_ = 0; /** * * * <pre> * Optional. Maximum number of results to return per page. If zero, the * service will choose a reasonable default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } public static final int PAGE_TOKEN_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. The previous ListCatalogItemsResponse.next_page_token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ @java.lang.Override public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } } /** * * * <pre> * Optional. The previous ListCatalogItemsResponse.next_page_token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int FILTER_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object filter_ = ""; /** * * * <pre> * Optional. A filter to apply on the list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ @java.lang.Override public java.lang.String getFilter() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } } /** * * * <pre> * Optional. A filter to apply on the list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ @java.lang.Override public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } if (pageSize_ != 0) { output.writeInt32(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, filter_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } if (pageSize_ != 0) { size += com.google.protobuf.CodedOutputStream.computeInt32Size(2, pageSize_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(pageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, pageToken_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(filter_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, filter_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return 
size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest)) { return super.equals(obj); } com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest other = (com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (getPageSize() != other.getPageSize()) return false; if (!getPageToken().equals(other.getPageToken())) return false; if (!getFilter().equals(other.getFilter())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (37 * hash) + PAGE_SIZE_FIELD_NUMBER; hash = (53 * hash) + getPageSize(); hash = (37 * hash) + PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getPageToken().hashCode(); hash = (37 * hash) + FILTER_FIELD_NUMBER; hash = (53 * hash) + getFilter().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, 
extensionRegistry); } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for ListCatalogItems method. 
* </pre> * * Protobuf type {@code google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest) com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.recommendationengine.v1beta1.CatalogServiceOuterClass .internal_static_google_cloud_recommendationengine_v1beta1_ListCatalogItemsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.recommendationengine.v1beta1.CatalogServiceOuterClass .internal_static_google_cloud_recommendationengine_v1beta1_ListCatalogItemsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest.class, com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest.Builder.class); } // Construct using // com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; pageSize_ = 0; pageToken_ = ""; filter_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.recommendationengine.v1beta1.CatalogServiceOuterClass .internal_static_google_cloud_recommendationengine_v1beta1_ListCatalogItemsRequest_descriptor; } @java.lang.Override public com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest getDefaultInstanceForType() { return 
com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest build() { com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest buildPartial() { com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest result = new com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.pageSize_ = pageSize_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.pageToken_ = pageToken_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.filter_ = filter_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor 
field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest) { return mergeFrom( (com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest other) { if (other == com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (other.getPageSize() != 0) { setPageSize(other.getPageSize()); } if (!other.getPageToken().isEmpty()) { pageToken_ = other.pageToken_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getFilter().isEmpty()) { filter_ = other.filter_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 16: { pageSize_ = input.readInt32(); bitField0_ |= 0x00000002; break; } // case 16 case 26: { pageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { filter_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { 
done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The parent catalog resource name, such as * `projects/&#42;&#47;locations/global/catalogs/default_catalog`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The parent catalog resource name, such as * `projects/&#42;&#47;locations/global/catalogs/default_catalog`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The parent catalog resource name, such as * `projects/&#42;&#47;locations/global/catalogs/default_catalog`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. * @return This builder for chaining. 
*/ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The parent catalog resource name, such as * `projects/&#42;&#47;locations/global/catalogs/default_catalog`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The parent catalog resource name, such as * `projects/&#42;&#47;locations/global/catalogs/default_catalog`. * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private int pageSize_; /** * * * <pre> * Optional. Maximum number of results to return per page. If zero, the * service will choose a reasonable default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageSize. */ @java.lang.Override public int getPageSize() { return pageSize_; } /** * * * <pre> * Optional. Maximum number of results to return per page. If zero, the * service will choose a reasonable default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageSize to set. * @return This builder for chaining. 
*/ public Builder setPageSize(int value) { pageSize_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Maximum number of results to return per page. If zero, the * service will choose a reasonable default. * </pre> * * <code>int32 page_size = 2 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageSize() { bitField0_ = (bitField0_ & ~0x00000002); pageSize_ = 0; onChanged(); return this; } private java.lang.Object pageToken_ = ""; /** * * * <pre> * Optional. The previous ListCatalogItemsResponse.next_page_token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The pageToken. */ public java.lang.String getPageToken() { java.lang.Object ref = pageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); pageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. The previous ListCatalogItemsResponse.next_page_token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for pageToken. */ public com.google.protobuf.ByteString getPageTokenBytes() { java.lang.Object ref = pageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); pageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. The previous ListCatalogItemsResponse.next_page_token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The pageToken to set. * @return This builder for chaining. 
*/ public Builder setPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * Optional. The previous ListCatalogItemsResponse.next_page_token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearPageToken() { pageToken_ = getDefaultInstance().getPageToken(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * Optional. The previous ListCatalogItemsResponse.next_page_token. * </pre> * * <code>string page_token = 3 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for pageToken to set. * @return This builder for chaining. */ public Builder setPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); pageToken_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object filter_ = ""; /** * * * <pre> * Optional. A filter to apply on the list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The filter. */ public java.lang.String getFilter() { java.lang.Object ref = filter_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); filter_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Optional. A filter to apply on the list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return The bytes for filter. 
*/ public com.google.protobuf.ByteString getFilterBytes() { java.lang.Object ref = filter_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); filter_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Optional. A filter to apply on the list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The filter to set. * @return This builder for chaining. */ public Builder setFilter(java.lang.String value) { if (value == null) { throw new NullPointerException(); } filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * Optional. A filter to apply on the list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @return This builder for chaining. */ public Builder clearFilter() { filter_ = getDefaultInstance().getFilter(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * Optional. A filter to apply on the list results. * </pre> * * <code>string filter = 4 [(.google.api.field_behavior) = OPTIONAL];</code> * * @param value The bytes for filter to set. * @return This builder for chaining. 
*/ public Builder setFilterBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); filter_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest) private static final com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest(); } public static com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListCatalogItemsRequest> PARSER = new com.google.protobuf.AbstractParser<ListCatalogItemsRequest>() { @java.lang.Override public ListCatalogItemsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) 
.setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListCatalogItemsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListCatalogItemsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.recommendationengine.v1beta1.ListCatalogItemsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): The three lines below are stray dataset/extraction metadata
// (repo id "googleapis/google-cloud-java", byte size 35980, and the path
// java-dialogflow/proto-google-cloud-dialogflow-v2beta1/src/main/java/com/google/cloud/dialogflow/v2beta1/UpdateKnowledgeBaseRequest.java)
// accidentally concatenated between two distinct protoc-generated files.
// They are not Java source; this file should be split back into its two
// original generated files (ListCatalogItemsRequest.java and
// UpdateKnowledgeBaseRequest.java) and regenerated from the .proto sources
// rather than edited by hand.
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dialogflow/v2beta1/knowledge_base.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dialogflow.v2beta1; /** * * * <pre> * Request message for * [KnowledgeBases.UpdateKnowledgeBase][google.cloud.dialogflow.v2beta1.KnowledgeBases.UpdateKnowledgeBase]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest} */ public final class UpdateKnowledgeBaseRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest) UpdateKnowledgeBaseRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateKnowledgeBaseRequest.newBuilder() to construct. 
private UpdateKnowledgeBaseRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateKnowledgeBaseRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateKnowledgeBaseRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2beta1.KnowledgeBaseProto .internal_static_google_cloud_dialogflow_v2beta1_UpdateKnowledgeBaseRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2beta1.KnowledgeBaseProto .internal_static_google_cloud_dialogflow_v2beta1_UpdateKnowledgeBaseRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest.class, com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest.Builder.class); } private int bitField0_; public static final int KNOWLEDGE_BASE_FIELD_NUMBER = 1; private com.google.cloud.dialogflow.v2beta1.KnowledgeBase knowledgeBase_; /** * * * <pre> * Required. The knowledge base to update. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_base = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the knowledgeBase field is set. */ @java.lang.Override public boolean hasKnowledgeBase() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The knowledge base to update. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_base = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The knowledgeBase. */ @java.lang.Override public com.google.cloud.dialogflow.v2beta1.KnowledgeBase getKnowledgeBase() { return knowledgeBase_ == null ? 
com.google.cloud.dialogflow.v2beta1.KnowledgeBase.getDefaultInstance() : knowledgeBase_; } /** * * * <pre> * Required. The knowledge base to update. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_base = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.dialogflow.v2beta1.KnowledgeBaseOrBuilder getKnowledgeBaseOrBuilder() { return knowledgeBase_ == null ? com.google.cloud.dialogflow.v2beta1.KnowledgeBase.getDefaultInstance() : knowledgeBase_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. Not specified means `update all`. * Currently, only `display_name` can be updated, an InvalidArgument will be * returned for attempting to update other fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Not specified means `update all`. * Currently, only `display_name` can be updated, an InvalidArgument will be * returned for attempting to update other fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. Not specified means `update all`. * Currently, only `display_name` can be updated, an InvalidArgument will be * returned for attempting to update other fields. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getKnowledgeBase()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getKnowledgeBase()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest)) { return super.equals(obj); } com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest other = (com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest) obj; if (hasKnowledgeBase() != other.hasKnowledgeBase()) return false; if (hasKnowledgeBase()) { if (!getKnowledgeBase().equals(other.getKnowledgeBase())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if 
(!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasKnowledgeBase()) { hash = (37 * hash) + KNOWLEDGE_BASE_FIELD_NUMBER; hash = (53 * hash) + getKnowledgeBase().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [KnowledgeBases.UpdateKnowledgeBase][google.cloud.dialogflow.v2beta1.KnowledgeBases.UpdateKnowledgeBase]. * </pre> * * Protobuf type {@code google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest) com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dialogflow.v2beta1.KnowledgeBaseProto .internal_static_google_cloud_dialogflow_v2beta1_UpdateKnowledgeBaseRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dialogflow.v2beta1.KnowledgeBaseProto .internal_static_google_cloud_dialogflow_v2beta1_UpdateKnowledgeBaseRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest.class, com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest.Builder.class); } // Construct using com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); 
maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getKnowledgeBaseFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; knowledgeBase_ = null; if (knowledgeBaseBuilder_ != null) { knowledgeBaseBuilder_.dispose(); knowledgeBaseBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dialogflow.v2beta1.KnowledgeBaseProto .internal_static_google_cloud_dialogflow_v2beta1_UpdateKnowledgeBaseRequest_descriptor; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest getDefaultInstanceForType() { return com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest build() { com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest buildPartial() { com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest result = new com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.knowledgeBase_ = knowledgeBaseBuilder_ == null ? 
knowledgeBase_ : knowledgeBaseBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest) { return mergeFrom((com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest other) { if (other == com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest.getDefaultInstance()) return this; if (other.hasKnowledgeBase()) { mergeKnowledgeBase(other.getKnowledgeBase()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } 
@java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getKnowledgeBaseFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.dialogflow.v2beta1.KnowledgeBase knowledgeBase_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2beta1.KnowledgeBase, com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder, com.google.cloud.dialogflow.v2beta1.KnowledgeBaseOrBuilder> knowledgeBaseBuilder_; /** * * * <pre> * Required. The knowledge base to update. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_base = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the knowledgeBase field is set. */ public boolean hasKnowledgeBase() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The knowledge base to update. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_base = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The knowledgeBase. 
*/ public com.google.cloud.dialogflow.v2beta1.KnowledgeBase getKnowledgeBase() { if (knowledgeBaseBuilder_ == null) { return knowledgeBase_ == null ? com.google.cloud.dialogflow.v2beta1.KnowledgeBase.getDefaultInstance() : knowledgeBase_; } else { return knowledgeBaseBuilder_.getMessage(); } } /** * * * <pre> * Required. The knowledge base to update. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_base = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setKnowledgeBase(com.google.cloud.dialogflow.v2beta1.KnowledgeBase value) { if (knowledgeBaseBuilder_ == null) { if (value == null) { throw new NullPointerException(); } knowledgeBase_ = value; } else { knowledgeBaseBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The knowledge base to update. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_base = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setKnowledgeBase( com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder builderForValue) { if (knowledgeBaseBuilder_ == null) { knowledgeBase_ = builderForValue.build(); } else { knowledgeBaseBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The knowledge base to update. 
* </pre> * * <code> * .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_base = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeKnowledgeBase(com.google.cloud.dialogflow.v2beta1.KnowledgeBase value) { if (knowledgeBaseBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && knowledgeBase_ != null && knowledgeBase_ != com.google.cloud.dialogflow.v2beta1.KnowledgeBase.getDefaultInstance()) { getKnowledgeBaseBuilder().mergeFrom(value); } else { knowledgeBase_ = value; } } else { knowledgeBaseBuilder_.mergeFrom(value); } if (knowledgeBase_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The knowledge base to update. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_base = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearKnowledgeBase() { bitField0_ = (bitField0_ & ~0x00000001); knowledgeBase_ = null; if (knowledgeBaseBuilder_ != null) { knowledgeBaseBuilder_.dispose(); knowledgeBaseBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The knowledge base to update. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_base = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder getKnowledgeBaseBuilder() { bitField0_ |= 0x00000001; onChanged(); return getKnowledgeBaseFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The knowledge base to update. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_base = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.dialogflow.v2beta1.KnowledgeBaseOrBuilder getKnowledgeBaseOrBuilder() { if (knowledgeBaseBuilder_ != null) { return knowledgeBaseBuilder_.getMessageOrBuilder(); } else { return knowledgeBase_ == null ? 
com.google.cloud.dialogflow.v2beta1.KnowledgeBase.getDefaultInstance() : knowledgeBase_; } } /** * * * <pre> * Required. The knowledge base to update. * </pre> * * <code> * .google.cloud.dialogflow.v2beta1.KnowledgeBase knowledge_base = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2beta1.KnowledgeBase, com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder, com.google.cloud.dialogflow.v2beta1.KnowledgeBaseOrBuilder> getKnowledgeBaseFieldBuilder() { if (knowledgeBaseBuilder_ == null) { knowledgeBaseBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.dialogflow.v2beta1.KnowledgeBase, com.google.cloud.dialogflow.v2beta1.KnowledgeBase.Builder, com.google.cloud.dialogflow.v2beta1.KnowledgeBaseOrBuilder>( getKnowledgeBase(), getParentForChildren(), isClean()); knowledgeBase_ = null; } return knowledgeBaseBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. Not specified means `update all`. * Currently, only `display_name` can be updated, an InvalidArgument will be * returned for attempting to update other fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. Not specified means `update all`. * Currently, only `display_name` can be updated, an InvalidArgument will be * returned for attempting to update other fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. 
*/ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. Not specified means `update all`. * Currently, only `display_name` can be updated, an InvalidArgument will be * returned for attempting to update other fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Not specified means `update all`. * Currently, only `display_name` can be updated, an InvalidArgument will be * returned for attempting to update other fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. Not specified means `update all`. * Currently, only `display_name` can be updated, an InvalidArgument will be * returned for attempting to update other fields. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. Not specified means `update all`. * Currently, only `display_name` can be updated, an InvalidArgument will be * returned for attempting to update other fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. Not specified means `update all`. * Currently, only `display_name` can be updated, an InvalidArgument will be * returned for attempting to update other fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. Not specified means `update all`. * Currently, only `display_name` can be updated, an InvalidArgument will be * returned for attempting to update other fields. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. Not specified means `update all`. * Currently, only `display_name` can be updated, an InvalidArgument will be * returned for attempting to update other fields. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest) } // @@protoc_insertion_point(class_scope:google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest) private static final com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest(); } public static 
com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateKnowledgeBaseRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateKnowledgeBaseRequest>() { @java.lang.Override public UpdateKnowledgeBaseRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateKnowledgeBaseRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateKnowledgeBaseRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dialogflow.v2beta1.UpdateKnowledgeBaseRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
oracle/graal
36,139
truffle/src/com.oracle.truffle.api.dsl/src/com/oracle/truffle/api/dsl/SpecializationStatistics.java
/* * Copyright (c) 2020, 2024, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * The Universal Permissive License (UPL), Version 1.0 * * Subject to the condition set forth below, permission is hereby granted to any * person obtaining a copy of this software, associated documentation and/or * data (collectively the "Software"), free of charge and under any and all * copyright rights in the Software, and any and all patent rights owned or * freely licensable by each licensor hereunder covering either (i) the * unmodified Software as contributed to or provided by such licensor, or (ii) * the Larger Works (as defined below), to deal in both * * (a) the Software, and * * (b) any piece of software and/or hardware listed in the lrgrwrks.txt file if * one is included with the Software each a "Larger Work" to which the Software * is contributed by such licensors), * * without restriction, including without limitation the rights to copy, create * derivative works of, display, perform, and distribute the Software and make, * use, sell, offer for sale, import, export, have made, and have sold the * Software and the Larger Work(s), and to sublicense the foregoing rights on * either these or other terms. * * This license is subject to the following condition: * * The above copyright notice and either this complete permission notice or at a * minimum a reference to the UPL must be included in all copies or substantial * portions of the Software. * * THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR * IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, * FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. 
IN NO EVENT SHALL THE * AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER * LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, * OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE * SOFTWARE. */ package com.oracle.truffle.api.dsl; import java.io.File; import java.io.PrintStream; import java.io.PrintWriter; import java.lang.annotation.ElementType; import java.lang.annotation.Retention; import java.lang.annotation.RetentionPolicy; import java.lang.annotation.Target; import java.lang.ref.WeakReference; import java.nio.file.Path; import java.nio.file.Paths; import java.util.ArrayList; import java.util.Arrays; import java.util.BitSet; import java.util.Collections; import java.util.Comparator; import java.util.HashMap; import java.util.HashSet; import java.util.IntSummaryStatistics; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Map.Entry; import java.util.Set; import com.oracle.truffle.api.CompilerAsserts; import com.oracle.truffle.api.CompilerDirectives; import com.oracle.truffle.api.CompilerDirectives.CompilationFinal; import com.oracle.truffle.api.CompilerDirectives.TruffleBoundary; import com.oracle.truffle.api.nodes.ExplodeLoop; import com.oracle.truffle.api.nodes.Node; import com.oracle.truffle.api.source.SourceSection; /** * Represents a specialization statistics utiltiy that can be {@link #enter() entered} to collect * additional statistics about Truffle DSL nodes. In order for the statistics to be useful the nodes * need to be regenerated using the <code>-Atruffle.dsl.GenerateSpecializationStatistics=true</code> * flag or using the {@link AlwaysEnabled} annotation. * <p> * The easiest way to use this utility is to enable the * <code>--engine.SpecializationStatistics</code> polyglot option. This should print the histogram * when the engine is closed. 
* <p> * See also the <a href= * "https://github.com/oracle/graal/blob/master/truffle/docs/SpecializationHistogram.md">usage * tutorial</a> on the website. * * @since 20.3 */ public final class SpecializationStatistics { private static final ThreadLocal<SpecializationStatistics> STATISTICS = new ThreadLocal<>(); private final Map<Class<?>, NodeClassStatistics> classStatistics = new HashMap<>(); private final Map<Node, EnabledNodeStatistics> uncachedStatistics = new HashMap<>(); SpecializationStatistics() { } /** * Returns <code>true</code> if the statistics did collect any data, else <code>false</code>. * * @since 20.3 */ public synchronized boolean hasData() { for (NodeClassStatistics classStatistic : classStatistics.values()) { if (classStatistic.createHistogram().getNodeStat().getSum() > 0) { return true; } } return false; } /** * Prints the specialization histogram to the provided writer. Does not print anything if no * {@link #hasData() data} was collected. * * @see #printHistogram(PrintWriter) * @since 20.3 */ public synchronized void printHistogram(PrintWriter writer) { List<NodeClassHistogram> histograms = new ArrayList<>(); long parentSum = 0; long parentCount = 0; for (NodeClassStatistics classStatistic : classStatistics.values()) { NodeClassHistogram histogram = classStatistic.createHistogram(); histograms.add(histogram); parentSum += histogram.getNodeStat().getSum(); parentCount += histogram.getNodeStat().getCount(); } Collections.sort(histograms, new Comparator<NodeClassHistogram>() { public int compare(NodeClassHistogram o1, NodeClassHistogram o2) { return Long.compare(o1.getNodeStat().getSum(), o2.getNodeStat().getSum()); } }); int width = 0; for (NodeClassHistogram histogram : histograms) { if (histogram.getNodeStat().getSum() == 0) { continue; } width = Math.max(histogram.getLabelWidth(), width); } width = Math.min(width, 80); NodeClassHistogram.printLine(writer, " ", width); for (NodeClassHistogram histogram : histograms) { if 
(histogram.getNodeStat().getSum() == 0) { continue; } histogram.print(writer, width, parentCount, parentSum); } } /** * Prints the specialization histogram to the provided stream. Does not print anything if no * {@link #hasData() data} was collected. * * @see #printHistogram(PrintWriter) * @since 20.3 */ public synchronized void printHistogram(PrintStream stream) { printHistogram(new PrintWriter(stream)); } /** * Creates a new specialization statistics instance. Note specialization statistics need to be * {@link #enter() entered} to collect data on a thread. * * @since 20.3 */ public static SpecializationStatistics create() { return new SpecializationStatistics(); } private synchronized NodeStatistics createCachedNodeStatistic(Node node, String[] specializations) { NodeClassStatistics classStatistic = getClassStatistics(node.getClass(), specializations); EnabledNodeStatistics stat = new EnabledNodeStatistics(node, classStatistic); classStatistic.statistics.add(stat); if (classStatistic.nodeCounter++ % 1024 == 0) { /* * In order to not crash for code load benchmarks we need to process collected nodes * from time to time to clean them up. */ classStatistic.processCollectedStatistics(); } return stat; } private NodeClassStatistics getClassStatistics(Class<?> nodeClass, String[] specializations) { assert Thread.holdsLock(this); return this.classStatistics.computeIfAbsent(nodeClass, (c) -> new NodeClassStatistics(c, specializations)); } private static NodeStatistics createUncachedNodeStatistic(Node node, String[] specializations) { return new UncachedNodeStatistics(node, specializations); } /** * Enters this specialization instance object on the current thread. After entering a * specialization statistics instance will gather statistics for all nodes with * {@link Specialization specializations} that were created on this entered thread. Multiple * threads may be entered at the same time. 
The caller must make sure to * {@link #leave(SpecializationStatistics)} the current statistics after entering in all cases. * * @since 20.3 */ @TruffleBoundary public SpecializationStatistics enter() { SpecializationStatistics prev = STATISTICS.get(); STATISTICS.set(this); return prev; } /** * Leaves the currently {@link #enter() entered} entered statistics. It is required to leave a * statistics block after it was entered. It is recommended to use a finally block for this * purpose. * * @since 20.3 */ @SuppressWarnings("static-method") @TruffleBoundary public void leave(SpecializationStatistics prev) { STATISTICS.set(prev); } /** * Used on nodes to always enable specialization statistics. The Truffle DSL processor will not * generate statistics code unless the * <code>-J-Dtruffle.dsl.GenerateSpecializationStatistics=true</code> javac system property is * set. This annotation can be used to annotate node types that want to force enable the * statistics independent of the system property. This annotation is inherited by sub classes. * * @since 20.3 */ @Retention(RetentionPolicy.CLASS) @Target({ElementType.TYPE}) public @interface AlwaysEnabled { } static final class NodeClassStatistics { private List<EnabledNodeStatistics> statistics = new ArrayList<>(); /* * Combines data from all collected nodes. */ private final NodeClassHistogram collectedHistogram; private int nodeCounter; NodeClassStatistics(Class<?> nodeClass, String[] specializations) { this.collectedHistogram = new NodeClassHistogram(nodeClass, specializations); } private void processCollectedStatistics() { boolean found = false; /* * Most calls to processStatistics don't actually need to remove anything. But if * something is removed it is typically more than one entry, so we do a first pass over * the references to find out whether there is a removed node and then recreate the * entire list. 
*/ for (EnabledNodeStatistics statistic : this.statistics) { if (statistic.isCollected()) { found = true; break; } } if (found) { List<EnabledNodeStatistics> newStatistics = new ArrayList<>(); for (EnabledNodeStatistics statistic : this.statistics) { if (statistic.isCollected()) { collectedHistogram.accept(statistic); } else { newStatistics.add(statistic); } } statistics = newStatistics; } } public NodeClassHistogram createHistogram() { NodeClassHistogram h = new NodeClassHistogram(collectedHistogram.getNodeClass(), collectedHistogram.getSpecializationNames()); h.combine(this.collectedHistogram); for (EnabledNodeStatistics stat : statistics) { h.accept(stat); } return h; } } static final class IntStatistics extends IntSummaryStatistics { private SourceSection maxSourceSection; @Override @Deprecated(since = "20.3") public void accept(int value) { throw new UnsupportedOperationException(); } public void accept(int value, SourceSection sourceSection) { if (value > getMax()) { this.maxSourceSection = sourceSection; } super.accept(value); } public void combine(IntStatistics other) { if (other.getMax() > this.getMax()) { this.maxSourceSection = other.maxSourceSection; } super.combine(other); } @Override @Deprecated(since = "20.3") public void combine(IntSummaryStatistics other) { throw new UnsupportedOperationException(); } } static final class NodeClassHistogram { private final Class<?> nodeClass; private final String[] specializationNames; private final IntStatistics nodeStat; private final IntStatistics[] specializationStat; private final List<Map<TypeCombination, IntStatistics>> typeCombinationStat; private final Map<BitSet, IntStatistics[]> specializationCombinationStat; private final Map<BitSet, IntStatistics> specializationCombinationSumStat; @SuppressWarnings("unchecked") NodeClassHistogram(Class<?> nodeClass, String[] specializationNames) { this.nodeClass = nodeClass; this.specializationNames = specializationNames; this.typeCombinationStat = new 
ArrayList<>(specializationNames.length); this.specializationStat = new IntStatistics[specializationNames.length]; this.nodeStat = new IntStatistics(); for (int i = 0; i < specializationNames.length; i++) { typeCombinationStat.add(new LinkedHashMap<>()); specializationStat[i] = new IntStatistics(); } this.specializationCombinationStat = new HashMap<>(); this.specializationCombinationSumStat = new HashMap<>(); } Class<?> getNodeClass() { return nodeClass; } String[] getSpecializationNames() { return specializationNames; } IntStatistics getNodeStat() { return nodeStat; } void accept(EnabledNodeStatistics statistics) { int nodeSum = 0; SourceSection sourceSection = statistics.getSourceSection(); BitSet enabledBitSet = new BitSet(); for (int i = 0; i < statistics.specializations.length; i++) { TypeCombination combination = statistics.specializations[i]; int specializationSum = 0; while (combination != null) { int count = combination.executionCount; IntStatistics typeCombination = this.typeCombinationStat.get(i).computeIfAbsent(combination, (c) -> new IntStatistics()); typeCombination.accept(count, sourceSection); combination = combination.next; specializationSum += count; } nodeSum += specializationSum; if (specializationSum != 0) { enabledBitSet.set(i); specializationStat[i].accept(specializationSum, sourceSection); } } if (nodeSum == 0) { // not actually executed return; } IntStatistics combinationSumStat = specializationCombinationSumStat.computeIfAbsent(enabledBitSet, (b) -> new IntStatistics()); IntStatistics[] combinationSpecializations = specializationCombinationStat.computeIfAbsent(enabledBitSet, (b) -> new IntStatistics[specializationNames.length]); int combinationSum = 0; for (int i = 0; i < statistics.specializations.length; i++) { TypeCombination combination = statistics.specializations[i]; int specializationSum = 0; while (combination != null) { specializationSum += combination.executionCount; combination = combination.next; } if (specializationSum != 0) { 
combinationSum += specializationSum; if (combinationSpecializations[i] == null) { combinationSpecializations[i] = new IntStatistics(); } combinationSpecializations[i].accept(specializationSum, sourceSection); } } combinationSumStat.accept(combinationSum, sourceSection); if (nodeSum != 0) { nodeStat.accept(nodeSum, sourceSection); } } void combine(NodeClassHistogram nodeClassStatistics) { for (int i = 0; i < typeCombinationStat.size(); i++) { Map<TypeCombination, IntStatistics> statistics = nodeClassStatistics.typeCombinationStat.get(i); for (Entry<TypeCombination, IntStatistics> executionStat : statistics.entrySet()) { this.typeCombinationStat.get(i).computeIfAbsent(executionStat.getKey(), (c) -> new IntStatistics()).combine(executionStat.getValue()); } for (int j = 0; j < specializationStat.length; j++) { specializationStat[j].combine(nodeClassStatistics.specializationStat[i]); } nodeStat.combine(nodeClassStatistics.nodeStat); } } void print(PrintWriter stream, int width, long parentCount, long parentSum) { // we need 6 more characters to fit the maximum indent if (nodeStat.getCount() == 0) { return; } stream.printf("| %-" + width + "s Instances Executions Executions per instance %n", "Name"); printLine(stream, " ", width); String className = getDisplayName(); printStats(stream, "| ", className, width, nodeStat, parentCount, parentSum); for (int i = 0; i < specializationNames.length; i++) { int size = typeCombinationStat.get(i).size(); String specializationLabel = specializationNames[i]; if (size == 1) { specializationLabel += " " + typeCombinationStat.get(i).keySet().iterator().next().getDisplayName(); } printStats(stream, "| ", specializationLabel, width, specializationStat[i], nodeStat.getCount(), nodeStat.getSum()); if (size > 1) { for (Entry<TypeCombination, IntStatistics> entry : typeCombinationStat.get(i).entrySet()) { printStats(stream, "| ", entry.getKey().getDisplayName(), width, entry.getValue(), specializationStat[i].getCount(), 
specializationStat[i].getSum()); } } } printLine(stream, "| ", width); Set<BitSet> printedCombinations = new HashSet<>(); for (int specialization = 0; specialization < specializationNames.length; specialization++) { for (BitSet specializations : specializationCombinationStat.keySet()) { if (printedCombinations.contains(specializations)) { continue; } // trying to order them by index. First print all combinations with the first // specialization then all with the second and so on. if (!specializations.get(specialization)) { continue; } IntStatistics statistics = specializationCombinationSumStat.get(specializations); IntStatistics[] specializationStatistics = specializationCombinationStat.get(specializations); int specializationIndex = 0; StringBuilder label = new StringBuilder("["); String sep = ""; int bits = 0; while ((specializationIndex = specializations.nextSetBit(specializationIndex)) != -1) { label.append(sep); label.append(specializationNames[specializationIndex]); sep = ", "; specializationIndex++; // exclude previous bit. bits++; } label.append("]"); printStats(stream, "| ", label.toString(), width, statistics, nodeStat.getCount(), nodeStat.getSum()); if (bits > 1) { specializationIndex = 0; while ((specializationIndex = specializations.nextSetBit(specializationIndex)) != -1) { printStats(stream, "| ", specializationNames[specializationIndex], width, specializationStatistics[specializationIndex], statistics.getCount(), statistics.getSum()); specializationIndex++; // exclude previous bit. 
} } printedCombinations.add(specializations); } } printLine(stream, " ", width); } static void printLine(PrintWriter stream, String indent, int width) { stream.print(indent); for (int i = 0; i < width + 100 - indent.length(); i++) { stream.print('-'); } stream.print(System.lineSeparator()); } private String getDisplayName() { String className = nodeClass.getSimpleName(); if (className.equals("Uncached") || className.equals("Inlined")) { Class<?> enclosing = nodeClass.getEnclosingClass(); if (enclosing != null) { className = enclosing.getSimpleName() + "." + className; } } return className; } private int getLabelWidth() { int width = 0; width = Math.max(getDisplayName().length(), width); for (String name : specializationNames) { width = Math.max(name.length(), width); } for (Map<TypeCombination, IntStatistics> executionStat : typeCombinationStat) { for (TypeCombination combination : executionStat.keySet()) { width = Math.max(combination.getDisplayName().length(), width); } } return width; } private static void printStats(PrintWriter stream, String indent, String label, int labelWidth, IntStatistics nodeStats, long parentCount, long parentSum) { String countPercent = String.format("(%.0f%%)", ((double) nodeStats.getCount() / (double) parentCount) * 100); String sumPercent = String.format("(%.0f%%)", ((double) nodeStats.getSum() / (double) parentSum) * 100); stream.printf("%s%-" + labelWidth + "s %8d %-6s %12d %-6s Min=%10d Avg=%12.2f Max= %10d MaxNode= %s %n", indent, label, nodeStats.getCount(), countPercent, nodeStats.getSum(), sumPercent, nodeStats.getMin() == Integer.MAX_VALUE ? 0 : nodeStats.getMin(), nodeStats.getAverage(), nodeStats.getMax() == Integer.MIN_VALUE ? 
0 : nodeStats.getMax(), formatSourceSection(nodeStats, nodeStats.maxSourceSection)); } // custom version of SourceSection#getShortDescription private static String formatSourceSection(IntStatistics stats, SourceSection s) { if (s == null) { if (stats.getCount() > 0) { return "N/A"; } else { return " - "; } } StringBuilder b = new StringBuilder(); if (s.getSource().getPath() == null) { b.append(s.getSource().getName()); } else { Path pathAbsolute = Paths.get(s.getSource().getPath()); Path pathBase = new File("").getAbsoluteFile().toPath(); try { Path pathRelative = pathBase.relativize(pathAbsolute); b.append(pathRelative.toFile()); } catch (IllegalArgumentException e) { b.append(s.getSource().getName()); } } b.append("~"); formatIndices(b, s); return b.toString(); } private static void formatIndices(StringBuilder b, SourceSection s) { boolean singleLine = s.getStartLine() == s.getEndLine(); if (singleLine) { b.append(s.getStartLine()); } else { b.append(s.getStartLine()).append("-").append(s.getEndLine()); } b.append(":"); if (s.getCharLength() <= 1) { b.append(s.getCharIndex()); } else { b.append(s.getCharIndex()).append("-").append(s.getCharIndex() + s.getCharLength() - 1); } } } static final class TypeCombination { final TypeCombination next; final Class<?>[] types; int executionCount; TypeCombination(TypeCombination next, Class<?>[] types) { this.next = next; this.types = types; } String getDisplayName() { if (types.length == 0) { return "<no-args>"; } StringBuilder b = new StringBuilder(); b.append("<"); String sep = ""; for (int i = 0; i < types.length; i++) { b.append(sep); b.append(types[i].getSimpleName()); sep = " "; } b.append(">"); return b.toString(); } @Override public int hashCode() { return Arrays.hashCode(types); } @Override public boolean equals(Object obj) { if (!(obj instanceof TypeCombination)) { return false; } return Arrays.equals(types, ((TypeCombination) obj).types); } } static final class DisabledNodeStatistics extends NodeStatistics { 
static final DisabledNodeStatistics INSTANCE = new DisabledNodeStatistics(); @Override public void acceptExecute(int specializationIndex, Class<?> arg0) { } @Override public void acceptExecute(int specializationIndex, Class<?> arg0, Class<?> arg1) { } @Override public void acceptExecute(int specializationIndex, Class<?>... args) { } @Override public Class<?> resolveValueClass(Object value) { return null; } } static final class UncachedNodeStatistics extends NodeStatistics { final Node node; final String[] specializationNames; UncachedNodeStatistics(Node node, String[] specializations) { this.node = node; this.specializationNames = specializations; } @Override @TruffleBoundary public void acceptExecute(int specializationIndex, Class<?> arg0) { lookup().acceptExecute(specializationIndex, arg0); } @Override @TruffleBoundary public void acceptExecute(int specializationIndex, Class<?> arg0, Class<?> arg1) { lookup().acceptExecute(specializationIndex, arg0, arg1); } @Override @TruffleBoundary public void acceptExecute(int specializationIndex, Class<?>... 
args) { lookup().acceptExecute(specializationIndex, args); } @Override public Class<?> resolveValueClass(Object value) { if (value == null) { return void.class; } else { return value.getClass(); } } private NodeStatistics lookup() { SpecializationStatistics statistics = STATISTICS.get(); if (statistics != null) { synchronized (statistics) { return statistics.uncachedStatistics.computeIfAbsent(node, (n) -> createUncachedStatistic(statistics, n)); } } else { return DisabledNodeStatistics.INSTANCE; } } private EnabledNodeStatistics createUncachedStatistic(SpecializationStatistics statistics, Node n) { NodeClassStatistics classStat = statistics.getClassStatistics(this.node.getClass(), specializationNames); EnabledNodeStatistics nodeStatistic = new EnabledNodeStatistics(n, classStat); classStat.statistics.add(nodeStatistic); return nodeStatistic; } } static final class EnabledNodeStatistics extends NodeStatistics { private static final Object UNDEFINED_SOURCE_SECTION = new Object(); @CompilationFinal(dimensions = 1) final TypeCombination[] specializations; final WeakReference<Node> nodeRef; private Object sourceSection = UNDEFINED_SOURCE_SECTION; EnabledNodeStatistics(Node node, NodeClassStatistics statistics) { this.nodeRef = new WeakReference<>(node); this.specializations = new TypeCombination[statistics.collectedHistogram.getSpecializationNames().length]; } SourceSection getSourceSection() { if (sourceSection == UNDEFINED_SOURCE_SECTION) { return null; } return (SourceSection) sourceSection; } boolean isCollected() { return nodeRef.get() == null; } @Override @ExplodeLoop public void acceptExecute(int specializationIndex, Class<?> arg0) { CompilerAsserts.partialEvaluationConstant(this); TypeCombination combination = specializations[specializationIndex]; while (combination != null) { if (combination.types.length == 1) { if (combination.types[0] == arg0) { combination.executionCount++; return; } } combination = combination.next; } 
CompilerDirectives.transferToInterpreterAndInvalidate(); insertCombination(specializationIndex, arg0).executionCount++; } @Override @ExplodeLoop public void acceptExecute(int specializationIndex, Class<?> arg0, Class<?> arg1) { CompilerAsserts.partialEvaluationConstant(this); TypeCombination combination = specializations[specializationIndex]; while (combination != null) { if (combination.types.length == 2) { if (combination.types[0] == arg0 && combination.types[1] == arg1) { combination.executionCount++; return; } } combination = combination.next; } CompilerDirectives.transferToInterpreterAndInvalidate(); insertCombination(specializationIndex, arg0, arg1).executionCount++; } @Override @ExplodeLoop public void acceptExecute(int specializationIndex, Class<?>... args) { CompilerAsserts.partialEvaluationConstant(this); TypeCombination combination = findCombination(specializationIndex, args); if (combination != null) { combination.executionCount++; return; } CompilerDirectives.transferToInterpreterAndInvalidate(); insertCombination(specializationIndex, args).executionCount++; } @Override @SuppressWarnings("static-method") public Class<?> resolveValueClass(Object value) { if (value == null) { return void.class; } else { return value.getClass(); } } @ExplodeLoop private TypeCombination findCombination(int specializationIndex, Class<?>... args) { TypeCombination combination = specializations[specializationIndex]; while (combination != null) { if (combination.types.length == args.length) { boolean valid = true; for (int i = 0; i < combination.types.length; i++) { if (combination.types[i] != args[i]) { valid = false; break; } } if (valid) { return combination; } } combination = combination.next; } return null; } private synchronized TypeCombination insertCombination(int specializationIndex, Class<?>... 
args) { if (this.sourceSection == UNDEFINED_SOURCE_SECTION) { Node node = nodeRef.get(); if (node != null) { this.sourceSection = node.getEncapsulatingSourceSection(); } else { // this should not happen, but there is no guarantee this.sourceSection = null; } } TypeCombination combination = findCombination(specializationIndex, args); if (combination != null) { return combination; } specializations[specializationIndex] = combination = new TypeCombination(specializations[specializationIndex], args); return combination; } } /** * Class to collect statistics information per node. This class is intended to be used by * Truffle DSL generated code only. Do not use directly. * * @since 20.3 */ public abstract static class NodeStatistics { NodeStatistics() { } /** * Called when a node specialization was executed. This method is intended to be used by * Truffle DSL generated code only. Do not use directly. * * @since 20.3 */ public abstract void acceptExecute(int specializationIndex, Class<?> arg0); /** * Called when a node specialization was executed. This method is intended to be used by * Truffle DSL generated code only. Do not use directly. * * @since 20.3 */ public abstract void acceptExecute(int specializationIndex, Class<?> arg0, Class<?> arg1); /** * Called when a node specialization was executed. This method is intended to be used by * Truffle DSL generated code only. Do not use directly. * * @since 20.3 */ public abstract void acceptExecute(int specializationIndex, Class<?>... args); /** * Called to resolve the class of a value provided in {@link #acceptExecute(int, Class)}. * This method is intended to be used by Truffle DSL generated code only. Do not use * directly. * * @since 20.3 */ public abstract Class<?> resolveValueClass(Object value); /** * Called when a new node statistics object is created. This method is intended to be used * by Truffle DSL generated code only. Do not use directly. 
* * @since 20.3 */ public static NodeStatistics create(Node node, String[] specializations) { if (node.isAdoptable()) { SpecializationStatistics stat = STATISTICS.get(); if (stat == null) { return DisabledNodeStatistics.INSTANCE; } return stat.createCachedNodeStatistic(node, specializations); } else { return SpecializationStatistics.createUncachedNodeStatistic(node, specializations); } } } }
googleapis/google-cloud-java
35,900
java-securitycenter/proto-google-cloud-securitycenter-v2/src/main/java/com/google/cloud/securitycenter/v2/ListSourcesResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/v2/securitycenter_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.v2; /** * * * <pre> * Response message for listing sources. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v2.ListSourcesResponse} */ public final class ListSourcesResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.v2.ListSourcesResponse) ListSourcesResponseOrBuilder { private static final long serialVersionUID = 0L; // Use ListSourcesResponse.newBuilder() to construct. 
private ListSourcesResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListSourcesResponse() { sources_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListSourcesResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto .internal_static_google_cloud_securitycenter_v2_ListSourcesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto .internal_static_google_cloud_securitycenter_v2_ListSourcesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v2.ListSourcesResponse.class, com.google.cloud.securitycenter.v2.ListSourcesResponse.Builder.class); } public static final int SOURCES_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.cloud.securitycenter.v2.Source> sources_; /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ @java.lang.Override public java.util.List<com.google.cloud.securitycenter.v2.Source> getSourcesList() { return sources_; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.securitycenter.v2.SourceOrBuilder> getSourcesOrBuilderList() { return sources_; } /** * * * <pre> * Sources belonging to the requested parent. 
* </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ @java.lang.Override public int getSourcesCount() { return sources_.size(); } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ @java.lang.Override public com.google.cloud.securitycenter.v2.Source getSources(int index) { return sources_.get(index); } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ @java.lang.Override public com.google.cloud.securitycenter.v2.SourceOrBuilder getSourcesOrBuilder(int index) { return sources_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < sources_.size(); i++) { output.writeMessage(1, sources_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < sources_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, sources_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycenter.v2.ListSourcesResponse)) { return super.equals(obj); } com.google.cloud.securitycenter.v2.ListSourcesResponse other = (com.google.cloud.securitycenter.v2.ListSourcesResponse) obj; if (!getSourcesList().equals(other.getSourcesList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getSourcesCount() > 0) { hash = (37 * hash) + SOURCES_FIELD_NUMBER; hash = (53 * hash) + getSourcesList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.v2.ListSourcesResponse parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v2.ListSourcesResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v2.ListSourcesResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v2.ListSourcesResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.v2.ListSourcesResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.v2.ListSourcesResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } 
public static com.google.cloud.securitycenter.v2.ListSourcesResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v2.ListSourcesResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v2.ListSourcesResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v2.ListSourcesResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.v2.ListSourcesResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.v2.ListSourcesResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securitycenter.v2.ListSourcesResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder 
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Response message for listing sources. * </pre> * * Protobuf type {@code google.cloud.securitycenter.v2.ListSourcesResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.v2.ListSourcesResponse) com.google.cloud.securitycenter.v2.ListSourcesResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto .internal_static_google_cloud_securitycenter_v2_ListSourcesResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.v2.SecuritycenterServiceProto .internal_static_google_cloud_securitycenter_v2_ListSourcesResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.v2.ListSourcesResponse.class, com.google.cloud.securitycenter.v2.ListSourcesResponse.Builder.class); } // Construct using com.google.cloud.securitycenter.v2.ListSourcesResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (sourcesBuilder_ == null) { sources_ = java.util.Collections.emptyList(); } else { sources_ = null; sourcesBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.securitycenter.v2.SecuritycenterServiceProto .internal_static_google_cloud_securitycenter_v2_ListSourcesResponse_descriptor; } @java.lang.Override public com.google.cloud.securitycenter.v2.ListSourcesResponse getDefaultInstanceForType() { return com.google.cloud.securitycenter.v2.ListSourcesResponse.getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycenter.v2.ListSourcesResponse build() { com.google.cloud.securitycenter.v2.ListSourcesResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securitycenter.v2.ListSourcesResponse buildPartial() { com.google.cloud.securitycenter.v2.ListSourcesResponse result = new com.google.cloud.securitycenter.v2.ListSourcesResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.securitycenter.v2.ListSourcesResponse result) { if (sourcesBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { sources_ = java.util.Collections.unmodifiableList(sources_); bitField0_ = (bitField0_ & ~0x00000001); } result.sources_ = sources_; } else { result.sources_ = sourcesBuilder_.build(); } } private void buildPartial0(com.google.cloud.securitycenter.v2.ListSourcesResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor 
oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycenter.v2.ListSourcesResponse) { return mergeFrom((com.google.cloud.securitycenter.v2.ListSourcesResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.securitycenter.v2.ListSourcesResponse other) { if (other == com.google.cloud.securitycenter.v2.ListSourcesResponse.getDefaultInstance()) return this; if (sourcesBuilder_ == null) { if (!other.sources_.isEmpty()) { if (sources_.isEmpty()) { sources_ = other.sources_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureSourcesIsMutable(); sources_.addAll(other.sources_); } onChanged(); } } else { if (!other.sources_.isEmpty()) { if (sourcesBuilder_.isEmpty()) { sourcesBuilder_.dispose(); sourcesBuilder_ = null; sources_ = other.sources_; bitField0_ = (bitField0_ & ~0x00000001); sourcesBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getSourcesFieldBuilder() : null; } else { sourcesBuilder_.addAllMessages(other.sources_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.cloud.securitycenter.v2.Source m = input.readMessage( com.google.cloud.securitycenter.v2.Source.parser(), extensionRegistry); if (sourcesBuilder_ == null) { ensureSourcesIsMutable(); sources_.add(m); } else { sourcesBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.cloud.securitycenter.v2.Source> sources_ = java.util.Collections.emptyList(); private void ensureSourcesIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { sources_ = new java.util.ArrayList<com.google.cloud.securitycenter.v2.Source>(sources_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securitycenter.v2.Source, com.google.cloud.securitycenter.v2.Source.Builder, com.google.cloud.securitycenter.v2.SourceOrBuilder> 
sourcesBuilder_; /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public java.util.List<com.google.cloud.securitycenter.v2.Source> getSourcesList() { if (sourcesBuilder_ == null) { return java.util.Collections.unmodifiableList(sources_); } else { return sourcesBuilder_.getMessageList(); } } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public int getSourcesCount() { if (sourcesBuilder_ == null) { return sources_.size(); } else { return sourcesBuilder_.getCount(); } } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public com.google.cloud.securitycenter.v2.Source getSources(int index) { if (sourcesBuilder_ == null) { return sources_.get(index); } else { return sourcesBuilder_.getMessage(index); } } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public Builder setSources(int index, com.google.cloud.securitycenter.v2.Source value) { if (sourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSourcesIsMutable(); sources_.set(index, value); onChanged(); } else { sourcesBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public Builder setSources( int index, com.google.cloud.securitycenter.v2.Source.Builder builderForValue) { if (sourcesBuilder_ == null) { ensureSourcesIsMutable(); sources_.set(index, builderForValue.build()); onChanged(); } else { sourcesBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Sources belonging to the requested parent. 
* </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public Builder addSources(com.google.cloud.securitycenter.v2.Source value) { if (sourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSourcesIsMutable(); sources_.add(value); onChanged(); } else { sourcesBuilder_.addMessage(value); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public Builder addSources(int index, com.google.cloud.securitycenter.v2.Source value) { if (sourcesBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureSourcesIsMutable(); sources_.add(index, value); onChanged(); } else { sourcesBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public Builder addSources(com.google.cloud.securitycenter.v2.Source.Builder builderForValue) { if (sourcesBuilder_ == null) { ensureSourcesIsMutable(); sources_.add(builderForValue.build()); onChanged(); } else { sourcesBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public Builder addSources( int index, com.google.cloud.securitycenter.v2.Source.Builder builderForValue) { if (sourcesBuilder_ == null) { ensureSourcesIsMutable(); sources_.add(index, builderForValue.build()); onChanged(); } else { sourcesBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public Builder addAllSources( java.lang.Iterable<? 
// NOTE(review): machine-generated protobuf code (protoc output for ListSourcesResponse).
// Do not hand-edit; regenerate from the .proto file instead. Code left token-identical below;
// only the two @@protoc_insertion_point line comments are restored to their own lines, since
// the line-collapse had made them comment out the tokens that followed them.
extends com.google.cloud.securitycenter.v2.Source> values) { if (sourcesBuilder_ == null) { ensureSourcesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, sources_); onChanged(); } else { sourcesBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public Builder clearSources() { if (sourcesBuilder_ == null) { sources_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { sourcesBuilder_.clear(); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public Builder removeSources(int index) { if (sourcesBuilder_ == null) { ensureSourcesIsMutable(); sources_.remove(index); onChanged(); } else { sourcesBuilder_.remove(index); } return this; } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public com.google.cloud.securitycenter.v2.Source.Builder getSourcesBuilder(int index) { return getSourcesFieldBuilder().getBuilder(index); } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public com.google.cloud.securitycenter.v2.SourceOrBuilder getSourcesOrBuilder(int index) { if (sourcesBuilder_ == null) { return sources_.get(index); } else { return sourcesBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public java.util.List<?
extends com.google.cloud.securitycenter.v2.SourceOrBuilder> getSourcesOrBuilderList() { if (sourcesBuilder_ != null) { return sourcesBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(sources_); } } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public com.google.cloud.securitycenter.v2.Source.Builder addSourcesBuilder() { return getSourcesFieldBuilder() .addBuilder(com.google.cloud.securitycenter.v2.Source.getDefaultInstance()); } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public com.google.cloud.securitycenter.v2.Source.Builder addSourcesBuilder(int index) { return getSourcesFieldBuilder() .addBuilder(index, com.google.cloud.securitycenter.v2.Source.getDefaultInstance()); } /** * * * <pre> * Sources belonging to the requested parent. * </pre> * * <code>repeated .google.cloud.securitycenter.v2.Source sources = 1;</code> */ public java.util.List<com.google.cloud.securitycenter.v2.Source.Builder> getSourcesBuilderList() { return getSourcesFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securitycenter.v2.Source, com.google.cloud.securitycenter.v2.Source.Builder, com.google.cloud.securitycenter.v2.SourceOrBuilder> getSourcesFieldBuilder() { if (sourcesBuilder_ == null) { sourcesBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.securitycenter.v2.Source, com.google.cloud.securitycenter.v2.Source.Builder, com.google.cloud.securitycenter.v2.SourceOrBuilder>( sources_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); sources_ = null; } return sourcesBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results.
* </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. */ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Token to retrieve the next page of results, or empty if there are no more * results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining.
*/ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); }
// @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.v2.ListSourcesResponse)
}
// @@protoc_insertion_point(class_scope:google.cloud.securitycenter.v2.ListSourcesResponse)
private static final com.google.cloud.securitycenter.v2.ListSourcesResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securitycenter.v2.ListSourcesResponse(); } public static com.google.cloud.securitycenter.v2.ListSourcesResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListSourcesResponse> PARSER = new com.google.protobuf.AbstractParser<ListSourcesResponse>() { @java.lang.Override public ListSourcesResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static
com.google.protobuf.Parser<ListSourcesResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListSourcesResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securitycenter.v2.ListSourcesResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hadoop
35,949
hadoop-mapreduce-project/hadoop-mapreduce-client/hadoop-mapreduce-client-jobclient/src/test/java/org/apache/hadoop/mapreduce/v2/TestSpeculativeExecOnCluster.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.mapreduce.v2; import java.io.DataInput; import java.io.DataOutput; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.HashMap; import java.util.List; import java.util.Map; import org.apache.hadoop.conf.Configuration; import org.apache.hadoop.fs.FileSystem; import org.apache.hadoop.fs.Path; import org.apache.hadoop.fs.permission.FsPermission; import org.apache.hadoop.io.IntWritable; import org.apache.hadoop.io.NullWritable; import org.apache.hadoop.io.Writable; import org.apache.hadoop.mapreduce.Counters; import org.apache.hadoop.mapreduce.InputFormat; import org.apache.hadoop.mapreduce.InputSplit; import org.apache.hadoop.mapreduce.Job; import org.apache.hadoop.mapreduce.JobContext; import org.apache.hadoop.mapreduce.JobCounter; import org.apache.hadoop.mapreduce.JobStatus; import org.apache.hadoop.mapreduce.MRJobConfig; import org.apache.hadoop.mapreduce.Mapper; import org.apache.hadoop.mapreduce.Partitioner; import org.apache.hadoop.mapreduce.RecordReader; import org.apache.hadoop.mapreduce.Reducer; import org.apache.hadoop.mapreduce.TaskAttemptContext; import 
org.apache.hadoop.mapreduce.TaskAttemptID; import org.apache.hadoop.mapreduce.TaskType; import org.apache.hadoop.mapreduce.lib.input.FileInputFormat; import org.apache.hadoop.mapreduce.lib.output.FileOutputFormat; import org.apache.hadoop.mapreduce.lib.output.NullOutputFormat; import org.apache.hadoop.mapreduce.v2.app.speculate.ExponentiallySmoothedTaskRuntimeEstimator; import org.apache.hadoop.mapreduce.v2.app.speculate.LegacyTaskRuntimeEstimator; import org.apache.hadoop.mapreduce.v2.app.speculate.SimpleExponentialTaskRuntimeEstimator; import org.apache.hadoop.mapreduce.v2.app.speculate.TaskRuntimeEstimator; import org.junit.jupiter.api.AfterEach; import org.junit.jupiter.api.Disabled; import org.junit.jupiter.params.ParameterizedTest; import org.junit.jupiter.params.provider.MethodSource; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import static org.junit.jupiter.api.Assertions.assertEquals; import static org.junit.jupiter.api.Assertions.assertTrue; /** * Test speculation on Mini Cluster. 
*/ @Disabled public class TestSpeculativeExecOnCluster { private static final Logger LOG = LoggerFactory.getLogger(TestSpeculativeExecOnCluster.class); private static final int NODE_MANAGERS_COUNT = 2; private static final boolean ENABLE_SPECULATIVE_MAP = true; private static final boolean ENABLE_SPECULATIVE_REDUCE = true; private static final int NUM_MAP_DEFAULT = 8 * NODE_MANAGERS_COUNT; private static final int NUM_REDUCE_DEFAULT = NUM_MAP_DEFAULT / 2; private static final int MAP_SLEEP_TIME_DEFAULT = 60000; private static final int REDUCE_SLEEP_TIME_DEFAULT = 10000; private static final int MAP_SLEEP_COUNT_DEFAULT = 10000; private static final int REDUCE_SLEEP_COUNT_DEFAULT = 1000; private static final String MAP_SLEEP_COUNT = "mapreduce.sleepjob.map.sleep.count"; private static final String REDUCE_SLEEP_COUNT = "mapreduce.sleepjob.reduce.sleep.count"; private static final String MAP_SLEEP_TIME = "mapreduce.sleepjob.map.sleep.time"; private static final String REDUCE_SLEEP_TIME = "mapreduce.sleepjob.reduce.sleep.time"; private static final String MAP_SLEEP_CALCULATOR_TYPE = "mapreduce.sleepjob.map.sleep.time.calculator"; private static final String MAP_SLEEP_CALCULATOR_TYPE_DEFAULT = "normal_run"; private static Map<String, SleepDurationCalculator> mapSleepTypeMapper; private static FileSystem localFs; static { mapSleepTypeMapper = new HashMap<>(); mapSleepTypeMapper.put("normal_run", new SleepDurationCalcImpl()); mapSleepTypeMapper.put("stalled_run", new StalledSleepDurationCalcImpl()); mapSleepTypeMapper.put("slowing_run", new SlowingSleepDurationCalcImpl()); mapSleepTypeMapper.put("dynamic_slowing_run", new DynamicSleepDurationCalcImpl()); mapSleepTypeMapper.put("step_stalled_run", new StepStalledSleepDurationCalcImpl()); try { localFs = FileSystem.getLocal(new Configuration()); } catch (IOException io) { throw new RuntimeException("problem getting local fs", io); } } private static final Path TEST_ROOT_DIR = new Path("target", 
TestSpeculativeExecOnCluster.class.getName() + "-tmpDir") .makeQualified(localFs.getUri(), localFs.getWorkingDirectory()); private static final Path APP_JAR = new Path(TEST_ROOT_DIR, "MRAppJar.jar"); private static final Path TEST_OUT_DIR = new Path(TEST_ROOT_DIR, "test.out.dir"); private MiniMRYarnCluster mrCluster; private int myNumMapper; private int myNumReduce; private int myMapSleepTime; private int myReduceSleepTime; private int myMapSleepCount; private int myReduceSleepCount; private String chosenSleepCalc; private Class<?> estimatorClass; /** * The test cases take a long time to run all the estimators against all the * cases. We skip the legacy estimators to reduce the execution time. */ private List<String> ignoredTests; public static Collection<Object[]> getTestParameters() { List<String> ignoredTests = Arrays.asList(new String[] { "stalled_run", "slowing_run", "step_stalled_run" }); return Arrays.asList(new Object[][] { {SimpleExponentialTaskRuntimeEstimator.class, ignoredTests, NUM_MAP_DEFAULT, NUM_REDUCE_DEFAULT}, {LegacyTaskRuntimeEstimator.class, ignoredTests, NUM_MAP_DEFAULT, NUM_REDUCE_DEFAULT} }); } public void initTestSpeculativeExecOnCluster( Class<? extends TaskRuntimeEstimator> pEstimatorKlass, List<String> pTestToIgnore, Integer pNumMapper, Integer pNumReduce) throws IOException { this.ignoredTests = pTestToIgnore; this.estimatorClass = pEstimatorKlass; this.myNumMapper = pNumMapper; this.myNumReduce = pNumReduce; setup(); } public void setup() throws IOException { if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) { LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR + " not found. Not running test."); return; } if (mrCluster == null) { mrCluster = new MiniMRYarnCluster( TestSpeculativeExecution.class.getName(), NODE_MANAGERS_COUNT); Configuration conf = new Configuration(); mrCluster.init(conf); mrCluster.start(); } // workaround the absent public distcache. 
// setup() tail: stage the app jar onto the local FS (0700 so only the test user reads it)
// and reset per-run sleep-job defaults. tearDown() stops the mini cluster between tests.
// The helper classes below fabricate an input of empty splits so map tasks have work items
// without any real data; SpeculativeSleepJobPartitioner just round-robins keys to reducers.
localFs.copyFromLocalFile(new Path(MiniMRYarnCluster.APPJAR), APP_JAR); localFs.setPermission(APP_JAR, new FsPermission("700")); myMapSleepTime = MAP_SLEEP_TIME_DEFAULT; myReduceSleepTime = REDUCE_SLEEP_TIME_DEFAULT; myMapSleepCount = MAP_SLEEP_COUNT_DEFAULT; myReduceSleepCount = REDUCE_SLEEP_COUNT_DEFAULT; chosenSleepCalc = MAP_SLEEP_CALCULATOR_TYPE_DEFAULT; } @AfterEach public void tearDown() { if (mrCluster != null) { mrCluster.stop(); mrCluster = null; } } /** * Overrides default behavior of Partitioner for testing. */ public static class SpeculativeSleepJobPartitioner extends Partitioner<IntWritable, NullWritable> { public int getPartition(IntWritable k, NullWritable v, int numPartitions) { return k.get() % numPartitions; } } /** * Overrides default behavior of InputSplit for testing. */ public static class EmptySplit extends InputSplit implements Writable { public void write(DataOutput out) throws IOException { } public void readFields(DataInput in) throws IOException { } public long getLength() { return 0L; } public String[] getLocations() { return new String[0]; } } /** * Input format that sleeps after updating progress. */ public static class SpeculativeSleepInputFormat extends InputFormat<IntWritable, IntWritable> { public List<InputSplit> getSplits(JobContext jobContext) { List<InputSplit> ret = new ArrayList<InputSplit>(); int numSplits = jobContext.getConfiguration().
// The record reader below emits `count` records per map; each record's value is the number
// of keys the mapper should write for it so that, over the whole map, reducers receive
// redcount keys apiece. The emit/remainder arithmetic is order-sensitive — left untouched.
// NOTE(review): getProgress() returns 100 (not 1.0f) when count == 0 — same quirk as the
// upstream SleepJob this is modeled on; presumably harmless since empty maps finish
// immediately, but worth confirming against the framework's progress handling.
getInt(MRJobConfig.NUM_MAPS, 1); for (int i = 0; i < numSplits; ++i) { ret.add(new EmptySplit()); } return ret; } public RecordReader<IntWritable, IntWritable> createRecordReader( InputSplit ignored, TaskAttemptContext taskContext) throws IOException { Configuration conf = taskContext.getConfiguration(); final int count = conf.getInt(MAP_SLEEP_COUNT, MAP_SLEEP_COUNT_DEFAULT); if (count < 0) { throw new IOException("Invalid map count: " + count); } final int redcount = conf.getInt(REDUCE_SLEEP_COUNT, REDUCE_SLEEP_COUNT_DEFAULT); if (redcount < 0) { throw new IOException("Invalid reduce count: " + redcount); } final int emitPerMapTask = (redcount * taskContext.getNumReduceTasks()); return new RecordReader<IntWritable, IntWritable>() { private int records = 0; private int emitCount = 0; private IntWritable key = null; private IntWritable value = null; public void initialize(InputSplit split, TaskAttemptContext context) { } public boolean nextKeyValue() throws IOException { if (count == 0) { return false; } key = new IntWritable(); key.set(emitCount); int emit = emitPerMapTask / count; if ((emitPerMapTask) % count > records) { ++emit; } emitCount += emit; value = new IntWritable(); value.set(emit); return records++ < count; } public IntWritable getCurrentKey() { return key; } public IntWritable getCurrentValue() { return value; } public void close() throws IOException { } public float getProgress() throws IOException { return count == 0 ? 100 : records / ((float)count); } }; } } /** * Interface used to simulate different progress rates of the tasks. */ public interface SleepDurationCalculator { long calcSleepDuration(TaskAttemptID taId, int currCount, int totalCount, long defaultSleepDuration); } /** * All tasks have the same progress.
*/
// Calculator family: each implementation slows only the FIRST attempt (attempt id 0) of
// MAP task_0, leaving all other tasks at the baseline rate, so a speculator's reaction can
// be observed in isolation. (Javadoc grammar fixed below; code unchanged.)
public static class SleepDurationCalcImpl implements SleepDurationCalculator { private double threshold = 1.0; private double slowFactor = 1.0; SleepDurationCalcImpl() { } public long calcSleepDuration(TaskAttemptID taId, int currCount, int totalCount, long defaultSleepDuration) { if (threshold <= ((double) currCount) / totalCount) { return (long) (slowFactor * defaultSleepDuration); } return defaultSleepDuration; } } /** * The first attempt of task_0 slows down by a small factor that should not * trigger a speculation. A speculated attempt should never beat the * original task. * A conservative estimator/speculator will speculate another attempt * because of the slower progress. */ public static class SlowingSleepDurationCalcImpl implements SleepDurationCalculator { private double threshold = 0.4; private double slowFactor = 1.2; SlowingSleepDurationCalcImpl() { } public long calcSleepDuration(TaskAttemptID taId, int currCount, int totalCount, long defaultSleepDuration) { if ((taId.getTaskType() == TaskType.MAP) && (taId.getTaskID().getId() == 0) && (taId.getId() == 0)) { if (threshold <= ((double) currCount) / totalCount) { return (long) (slowFactor * defaultSleepDuration); } } return defaultSleepDuration; } } /** * The progress of the first Mapper task is stalled by 100 times the other * tasks. * The speculated attempt should succeed if the estimator detects * the slow down on time. */ public static class StalledSleepDurationCalcImpl implements SleepDurationCalculator { StalledSleepDurationCalcImpl() { } public long calcSleepDuration(TaskAttemptID taId, int currCount, int totalCount, long defaultSleepDuration) { if ((taId.getTaskType() == TaskType.MAP) && (taId.getTaskID().getId() == 0) && (taId.getId() == 0)) { return 1000 * defaultSleepDuration; } return defaultSleepDuration; } } /** * Emulates the behavior with a step change in the progress.
*/ public static class StepStalledSleepDurationCalcImpl implements SleepDurationCalculator { private double threshold = 0.4; private double slowFactor = 10000; StepStalledSleepDurationCalcImpl() { } public long calcSleepDuration(TaskAttemptID taId, int currCount, int totalCount, long defaultSleepDuration) { if ((taId.getTaskType() == TaskType.MAP) && (taId.getTaskID().getId() == 0) && (taId.getId() == 0)) { if (threshold <= ((double) currCount) / totalCount) { return (long) (slowFactor * defaultSleepDuration); } } return defaultSleepDuration; } } /** * Dynamically slows down the progress of the first Mapper task. * The speculated attempt should succeed if the estimator detects * the slow down on time. */ public static class DynamicSleepDurationCalcImpl implements SleepDurationCalculator { private double[] thresholds; private double[] slowFactors; DynamicSleepDurationCalcImpl() { thresholds = new double[] { 0.1, 0.25, 0.4, 0.5, 0.6, 0.65, 0.7, 0.8, 0.9 }; slowFactors = new double[] { 2.0, 4.0, 5.0, 6.0, 10.0, 15.0, 20.0, 25.0, 30.0 }; } public long calcSleepDuration(TaskAttemptID taId, int currCount, int totalCount, long defaultSleepDuration) { if ((taId.getTaskType() == TaskType.MAP) && (taId.getTaskID().getId() == 0) && (taId.getId() == 0)) { double currProgress = ((double) currCount) / totalCount; double slowFactor = 1.0; for (int i = 0; i < thresholds.length; i++) { if (thresholds[i] >= currProgress) { break; } slowFactor = slowFactors[i]; } return (long) (slowFactor * defaultSleepDuration); } return defaultSleepDuration; } } /** * Dummy class for testing Speculation. Sleeps for a defined period * of time in mapper. Generates fake input for map / reduce * jobs. Note that generated number of input pairs is in the order * of <code>numMappers * mapSleepTime / 100</code>, so the job uses * some disk space.
* The sleep duration for a given task is going to slowDown to evaluate * the estimator */
// Mapper: sleeps per record (duration chosen by the configured SleepDurationCalculator),
// then emits `value` consecutive keys so each reducer receives its share. Reducer: sleeps
// per key group. The collapse had fused the inline // comments into the code — restored to
// their own lines, tokens unchanged.
public static class SpeculativeSleepMapper extends Mapper<IntWritable, IntWritable, IntWritable, NullWritable> { private long mapSleepDuration = MAP_SLEEP_TIME_DEFAULT; private int mapSleepCount = 1; private int count = 0; private SleepDurationCalculator sleepCalc = new SleepDurationCalcImpl(); protected void setup(Context context) throws IOException, InterruptedException { Configuration conf = context.getConfiguration(); this.mapSleepCount = conf.getInt(MAP_SLEEP_COUNT, mapSleepCount); this.mapSleepDuration = mapSleepCount == 0 ? 0 : conf.getLong(MAP_SLEEP_TIME, MAP_SLEEP_TIME_DEFAULT) / mapSleepCount; this.sleepCalc = mapSleepTypeMapper.get(conf.get(MAP_SLEEP_CALCULATOR_TYPE, MAP_SLEEP_CALCULATOR_TYPE_DEFAULT)); } public void map(IntWritable key, IntWritable value, Context context) throws IOException, InterruptedException {
//it is expected that every map processes mapSleepCount number of records.
try { context.setStatus("Sleeping... (" + (mapSleepDuration * (mapSleepCount - count)) + ") ms left"); long sleepTime = sleepCalc.calcSleepDuration(context.getTaskAttemptID(), count, mapSleepCount, mapSleepDuration); Thread.sleep(sleepTime); } catch (InterruptedException ex) { throw (IOException) new IOException( "Interrupted while sleeping").initCause(ex); } ++count;
// output reduceSleepCount * numReduce number of random values, so that
// each reducer will get reduceSleepCount number of keys.
int k = key.get(); for (int i = 0; i < value.get(); ++i) { context.write(new IntWritable(k + i), NullWritable.get()); } } } /** * Implementation of the reducer task for testing.
*/ public static class SpeculativeSleepReducer extends Reducer<IntWritable, NullWritable, NullWritable, NullWritable> { private long reduceSleepDuration = REDUCE_SLEEP_TIME_DEFAULT; private int reduceSleepCount = 1; private int count = 0; protected void setup(Context context) throws IOException, InterruptedException { Configuration conf = context.getConfiguration(); this.reduceSleepCount = conf.getInt(REDUCE_SLEEP_COUNT, reduceSleepCount); this.reduceSleepDuration = reduceSleepCount == 0 ? 0 : conf.getLong(REDUCE_SLEEP_TIME, REDUCE_SLEEP_TIME_DEFAULT) / reduceSleepCount; } public void reduce(IntWritable key, Iterable<NullWritable> values, Context context) throws IOException { try { context.setStatus("Sleeping... (" + (reduceSleepDuration * (reduceSleepCount - count)) + ") ms left"); Thread.sleep(reduceSleepDuration); } catch (InterruptedException ex) { throw (IOException) new IOException( "Interrupted while sleeping").initCause(ex); } count++; } } /** * A class used to map the estimator implementation to the expected * test results.
*/
// EstimatorMetricsPair: pairs an estimator class with the baseline task counts; a run
// "speculated" iff launched map or reduce tasks exceed the baseline. getErrorMessage builds
// the assertion text for the under-/over-speculation direction the pair expects.
class EstimatorMetricsPair { private Class<?> estimatorClass; private int expectedMapTasks; private int expectedReduceTasks; private boolean speculativeEstimator; EstimatorMetricsPair(Class<?> estimatorClass, int mapTasks, int reduceTasks, boolean isToSpeculate) { this.estimatorClass = estimatorClass; this.expectedMapTasks = mapTasks; this.expectedReduceTasks = reduceTasks; this.speculativeEstimator = isToSpeculate; } boolean didSpeculate(Counters counters) { long launchedMaps = counters.findCounter(JobCounter.TOTAL_LAUNCHED_MAPS) .getValue(); long launchedReduce = counters .findCounter(JobCounter.TOTAL_LAUNCHED_REDUCES) .getValue(); boolean isSpeculated = (launchedMaps > expectedMapTasks || launchedReduce > expectedReduceTasks); return isSpeculated; } String getErrorMessage(Counters counters) { String msg = "Unexpected tasks running estimator " + estimatorClass.getName() + "\n\t"; long launchedMaps = counters.findCounter(JobCounter.TOTAL_LAUNCHED_MAPS) .getValue(); long launchedReduce = counters .findCounter(JobCounter.TOTAL_LAUNCHED_REDUCES) .getValue(); if (speculativeEstimator) { if (launchedMaps < expectedMapTasks) { msg += "maps " + launchedMaps + ", expected: " + expectedMapTasks; } if (launchedReduce < expectedReduceTasks) { msg += ", reduces " + launchedReduce + ", expected: " + expectedReduceTasks; } } else { if (launchedMaps > expectedMapTasks) { msg += "maps " + launchedMaps + ", expected: " + expectedMapTasks; } if (launchedReduce > expectedReduceTasks) { msg += ", reduces " + launchedReduce + ", expected: " + expectedReduceTasks; } } return msg; } } @ParameterizedTest(name = "{index}: TaskEstimator(EstimatorClass {0})") @MethodSource("getTestParameters") public void testExecDynamicSlowingSpeculative( Class<?
extends TaskRuntimeEstimator> pEstimatorKlass, List<String> pTestToIgnore, Integer pNumMapper, Integer pNumReduce) throws Exception { initTestSpeculativeExecOnCluster(pEstimatorKlass, pTestToIgnore, pNumMapper, pNumReduce); /*------------------------------------------------------------------ * Test that Map/Red speculates because: * 1- all tasks have same progress rate except for task_0 * 2- task_0 slows down by dynamic increasing factor * 3- A good estimator should readjust the estimation and the speculator * launches a new task. * * Expected: * A- SimpleExponentialTaskRuntimeEstimator: speculates a successful * attempt to beat the slowing task_0 * B- LegacyTaskRuntimeEstimator: speculates an attempt * C- ExponentiallySmoothedTaskRuntimeEstimator: Fails to detect the slow * down and never speculates but it may speculate other tasks * (mappers or reducers) * ----------------------------------------------------------------- */ chosenSleepCalc = "dynamic_slowing_run"; if (ignoredTests.contains(chosenSleepCalc)) { return; } EstimatorMetricsPair[] estimatorPairs = new EstimatorMetricsPair[] { new EstimatorMetricsPair(SimpleExponentialTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true), new EstimatorMetricsPair(LegacyTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true), new EstimatorMetricsPair( ExponentiallySmoothedTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true) }; for (EstimatorMetricsPair specEstimator : estimatorPairs) { if (!estimatorClass.equals(specEstimator.estimatorClass)) { continue; } LOG.info("+++ Dynamic Slow Progress testing against " + estimatorClass .getName() + " +++"); Job job = runSpecTest(); boolean succeeded = job.waitForCompletion(true); assertTrue(succeeded, "Job expected to succeed with estimator " + estimatorClass.getName()); assertEquals(JobStatus.State.SUCCEEDED, job.getJobState(), "Job expected to succeed with estimator " + estimatorClass.getName()); Counters counters = job.getCounters(); String errorMessage =
specEstimator.getErrorMessage(counters); boolean didSpeculate = specEstimator.didSpeculate(counters); assertEquals(didSpeculate, specEstimator.speculativeEstimator, errorMessage); assertEquals(0, counters.findCounter(JobCounter.NUM_FAILED_MAPS).getValue(), "Failed maps higher than 0 " + estimatorClass.getName()); } } @ParameterizedTest(name = "{index}: TaskEstimator(EstimatorClass {0})") @MethodSource("getTestParameters") public void testExecSlowNonSpeculative( Class<? extends TaskRuntimeEstimator> pEstimatorKlass, List<String> pTestToIgnore, Integer pNumMapper, Integer pNumReduce) throws Exception { initTestSpeculativeExecOnCluster(pEstimatorKlass, pTestToIgnore, pNumMapper, pNumReduce); /*------------------------------------------------------------------ * Test that Map/Red does not speculate because: * 1- all tasks have same progress rate except for task_0 * 2- task_0 slows down by 0.5 after 50% of the workload * 3- A good estimator may adjust the estimation that the task will finish * sooner than a new speculated task. * * Expected: * A- SimpleExponentialTaskRuntimeEstimator: does not speculate because * the new attempt estimated end time is not going to be smaller than the * original end time. * B- LegacyTaskRuntimeEstimator: speculates an attempt * C- ExponentiallySmoothedTaskRuntimeEstimator: speculates an attempt.
* ----------------------------------------------------------------- */
// Shared shape of every scenario method: pick a sleep-calculator name, bail if this
// parameterization ignores it, then run the sleep job once for the estimator under test and
// assert job success plus the expected speculate/don't-speculate outcome from the pairs.
chosenSleepCalc = "slowing_run"; if (ignoredTests.contains(chosenSleepCalc)) { return; } EstimatorMetricsPair[] estimatorPairs = new EstimatorMetricsPair[] { new EstimatorMetricsPair(SimpleExponentialTaskRuntimeEstimator.class, myNumMapper, myNumReduce, false), new EstimatorMetricsPair(LegacyTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true), new EstimatorMetricsPair( ExponentiallySmoothedTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true) }; for (EstimatorMetricsPair specEstimator : estimatorPairs) { if (!estimatorClass.equals(specEstimator.estimatorClass)) { continue; } LOG.info("+++ Linear Slow Progress Non Speculative testing against " + estimatorClass.getName() + " +++"); Job job = runSpecTest(); boolean succeeded = job.waitForCompletion(true); assertTrue(succeeded, "Job expected to succeed with estimator " + estimatorClass.getName()); assertEquals(JobStatus.State.SUCCEEDED, job.getJobState(), "Job expected to succeed with estimator " + estimatorClass.getName()); Counters counters = job.getCounters(); String errorMessage = specEstimator.getErrorMessage(counters); boolean didSpeculate = specEstimator.didSpeculate(counters); assertEquals(didSpeculate, specEstimator.speculativeEstimator, errorMessage); assertEquals(0, counters.findCounter(JobCounter.NUM_FAILED_MAPS).getValue(), "Failed maps higher than 0 " + estimatorClass.getName()); } } @ParameterizedTest(name = "{index}: TaskEstimator(EstimatorClass {0})") @MethodSource("getTestParameters") public void testExecStepStalledSpeculative( Class<?
extends TaskRuntimeEstimator> pEstimatorKlass, List<String> pTestToIgnore, Integer pNumMapper, Integer pNumReduce) throws Exception { initTestSpeculativeExecOnCluster(pEstimatorKlass, pTestToIgnore, pNumMapper, pNumReduce); /*------------------------------------------------------------------ * Test that Map/Red speculates because: * 1- all tasks have same progress rate except for task_0 * 2- task_0 has long sleep duration * 3- A good estimator may adjust the estimation that the task will finish * sooner than a new speculated task. * * Expected: * A- SimpleExponentialTaskRuntimeEstimator: speculates * B- LegacyTaskRuntimeEstimator: speculates * C- ExponentiallySmoothedTaskRuntimeEstimator: speculates * ----------------------------------------------------------------- */ chosenSleepCalc = "step_stalled_run"; if (ignoredTests.contains(chosenSleepCalc)) { return; } EstimatorMetricsPair[] estimatorPairs = new EstimatorMetricsPair[] { new EstimatorMetricsPair(SimpleExponentialTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true), new EstimatorMetricsPair(LegacyTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true), new EstimatorMetricsPair( ExponentiallySmoothedTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true) }; for (EstimatorMetricsPair specEstimator : estimatorPairs) { if (!estimatorClass.equals(specEstimator.estimatorClass)) { continue; } LOG.info("+++ Stalled Progress testing against " + estimatorClass.getName() + " +++"); Job job = runSpecTest(); boolean succeeded = job.waitForCompletion(true); assertTrue(succeeded, "Job expected to succeed with estimator " + estimatorClass.getName()); assertEquals(JobStatus.State.SUCCEEDED, job.getJobState(), "Job expected to succeed with estimator " + estimatorClass.getName()); Counters counters = job.getCounters(); String errorMessage = specEstimator.getErrorMessage(counters); boolean didSpeculate = specEstimator.didSpeculate(counters); assertEquals(didSpeculate, specEstimator.speculativeEstimator,
errorMessage); assertEquals(0, counters.findCounter(JobCounter.NUM_FAILED_MAPS) .getValue(), "Failed maps higher than 0 " + estimatorClass.getName()); } } @ParameterizedTest(name = "{index}: TaskEstimator(EstimatorClass {0})") @MethodSource("getTestParameters") public void testExecStalledSpeculative( Class<? extends TaskRuntimeEstimator> pEstimatorKlass, List<String> pTestToIgnore, Integer pNumMapper, Integer pNumReduce) throws Exception { initTestSpeculativeExecOnCluster(pEstimatorKlass, pTestToIgnore, pNumMapper, pNumReduce); /*------------------------------------------------------------------ * Test that Map/Red speculates because: * 1- all tasks have same progress rate except for task_0 * 2- task_0 has long sleep duration * 3- A good estimator may adjust the estimation that the task will finish * sooner than a new speculated task. * * Expected: * A- SimpleExponentialTaskRuntimeEstimator: speculates * B- LegacyTaskRuntimeEstimator: speculates * C- ExponentiallySmoothedTaskRuntimeEstimator: speculates * ----------------------------------------------------------------- */ chosenSleepCalc = "stalled_run"; if (ignoredTests.contains(chosenSleepCalc)) { return; } EstimatorMetricsPair[] estimatorPairs = new EstimatorMetricsPair[] { new EstimatorMetricsPair(SimpleExponentialTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true), new EstimatorMetricsPair(LegacyTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true), new EstimatorMetricsPair( ExponentiallySmoothedTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true) }; for (EstimatorMetricsPair specEstimator : estimatorPairs) { if (!estimatorClass.equals(specEstimator.estimatorClass)) { continue; } LOG.info("+++ Stalled Progress testing against " + estimatorClass.getName() + " +++"); Job job = runSpecTest(); boolean succeeded = job.waitForCompletion(true); assertTrue(succeeded, "Job expected to succeed with estimator " + estimatorClass.getName()); assertEquals(JobStatus.State.SUCCEEDED, job.getJobState(),
// NOTE(review): the chunk boundary split the "MRAppJar ... Not running test." string
// literal across two physical lines (invalid Java); the two chunks are re-joined on one
// line below, tokens unchanged. testExecNonSpeculative repeats setup()'s MRAppJar existence
// guard because setup() returns silently when the jar is missing. runSpecTest() wires the
// sleep-job config (estimator class, sleep times/counts, calculator type) onto the
// mini-cluster config and is cut off at the end of this chunk.
"Job expected to succeed with estimator " + estimatorClass.getName()); Counters counters = job.getCounters(); String errorMessage = specEstimator.getErrorMessage(counters); boolean didSpeculate = specEstimator.didSpeculate(counters); assertEquals(didSpeculate, specEstimator.speculativeEstimator, errorMessage); assertEquals(0, counters.findCounter(JobCounter.NUM_FAILED_MAPS) .getValue(), "Failed maps higher than 0 " + estimatorClass.getName()); } } @ParameterizedTest(name = "{index}: TaskEstimator(EstimatorClass {0})") @MethodSource("getTestParameters") public void testExecNonSpeculative( Class<? extends TaskRuntimeEstimator> pEstimatorKlass, List<String> pTestToIgnore, Integer pNumMapper, Integer pNumReduce) throws Exception { initTestSpeculativeExecOnCluster(pEstimatorKlass, pTestToIgnore, pNumMapper, pNumReduce); /*------------------------------------------------------------------ * Test that Map/Red does not speculate because all tasks progress in the * same rate. * * Expected: * A- SimpleExponentialTaskRuntimeEstimator: does not speculate * B- LegacyTaskRuntimeEstimator: speculates * C- ExponentiallySmoothedTaskRuntimeEstimator: speculates * ----------------------------------------------------------------- */ if (!(new File(MiniMRYarnCluster.APPJAR)).exists()) { LOG.info("MRAppJar " + MiniMRYarnCluster.APPJAR + " not found. Not running test."); return; } if (ignoredTests.contains(chosenSleepCalc)) { return; } EstimatorMetricsPair[] estimatorPairs = new EstimatorMetricsPair[] { new EstimatorMetricsPair(LegacyTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true), new EstimatorMetricsPair(SimpleExponentialTaskRuntimeEstimator.class, myNumMapper, myNumReduce, false), new EstimatorMetricsPair( ExponentiallySmoothedTaskRuntimeEstimator.class, myNumMapper, myNumReduce, true) }; for (EstimatorMetricsPair specEstimator : estimatorPairs) { if (!estimatorClass.equals(specEstimator.estimatorClass)) { continue; } LOG.info("+++ No Speculation testing against " + estimatorClass.getName() + " +++"); Job job = runSpecTest(); boolean succeeded = job.waitForCompletion(true); assertTrue(succeeded, "Job expected to succeed with estimator " + estimatorClass.getName()); assertEquals(JobStatus.State.SUCCEEDED, job.getJobState(), "Job expected to succeed with estimator " + estimatorClass.getName()); Counters counters = job.getCounters(); String errorMessage = specEstimator.getErrorMessage(counters); boolean didSpeculate = specEstimator.didSpeculate(counters); assertEquals(didSpeculate, specEstimator.speculativeEstimator, errorMessage); } } private Job runSpecTest() throws IOException, ClassNotFoundException, InterruptedException { Configuration conf = mrCluster.getConfig(); conf.setBoolean(MRJobConfig.MAP_SPECULATIVE, ENABLE_SPECULATIVE_MAP); conf.setBoolean(MRJobConfig.REDUCE_SPECULATIVE, ENABLE_SPECULATIVE_REDUCE); conf.setClass(MRJobConfig.MR_AM_TASK_ESTIMATOR, estimatorClass, TaskRuntimeEstimator.class); conf.setLong(MAP_SLEEP_TIME, myMapSleepTime); conf.setLong(REDUCE_SLEEP_TIME, myReduceSleepTime); conf.setInt(MAP_SLEEP_COUNT, myMapSleepCount); conf.setInt(REDUCE_SLEEP_COUNT, myReduceSleepCount); conf.setFloat(MRJobConfig.COMPLETED_MAPS_FOR_REDUCE_SLOWSTART, 1.0F); conf.setInt(MRJobConfig.NUM_MAPS, myNumMapper); conf.set(MAP_SLEEP_CALCULATOR_TYPE, chosenSleepCalc); Job job = Job.getInstance(conf);
job.setJarByClass(TestSpeculativeExecution.class); job.setMapperClass(SpeculativeSleepMapper.class); job.setMapOutputKeyClass(IntWritable.class); job.setMapOutputValueClass(NullWritable.class); job.setReducerClass(SpeculativeSleepReducer.class); job.setOutputFormatClass(NullOutputFormat.class); job.setInputFormatClass(SpeculativeSleepInputFormat.class); job.setPartitionerClass(SpeculativeSleepJobPartitioner.class); job.setNumReduceTasks(myNumReduce); FileInputFormat.addInputPath(job, new Path("ignored")); // Delete output directory if it exists. try { localFs.delete(TEST_OUT_DIR, true); } catch (IOException e) { // ignore } FileOutputFormat.setOutputPath(job, TEST_OUT_DIR); // Creates the Job Configuration job.addFileToClassPath(APP_JAR); // The AppMaster jar itself. job.setMaxMapAttempts(2); job.submit(); return job; } }
apache/olingo-odata4
36,368
lib/server-core/src/main/java/org/apache/olingo/server/core/ODataDispatcher.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.olingo.server.core; import java.io.IOException; import java.util.List; import org.apache.olingo.commons.api.edm.EdmAction; import org.apache.olingo.commons.api.edm.EdmEntityType; import org.apache.olingo.commons.api.edm.EdmFunction; import org.apache.olingo.commons.api.edm.EdmPrimitiveTypeKind; import org.apache.olingo.commons.api.edm.EdmReturnType; import org.apache.olingo.commons.api.edm.EdmSingleton; import org.apache.olingo.commons.api.edm.EdmType; import org.apache.olingo.commons.api.edm.constants.EdmTypeKind; import org.apache.olingo.commons.api.format.ContentType; import org.apache.olingo.commons.api.http.HttpHeader; import org.apache.olingo.commons.api.http.HttpMethod; import org.apache.olingo.commons.core.edm.primitivetype.EdmPrimitiveTypeFactory; import org.apache.olingo.server.api.ODataApplicationException; import org.apache.olingo.server.api.ODataLibraryException; import org.apache.olingo.server.api.ODataRequest; import org.apache.olingo.server.api.ODataResponse; import org.apache.olingo.server.api.etag.CustomETagSupport; import org.apache.olingo.server.api.etag.PreconditionException; import org.apache.olingo.server.api.processor.ActionComplexCollectionProcessor; 
import org.apache.olingo.server.api.processor.ActionComplexProcessor; import org.apache.olingo.server.api.processor.ActionEntityCollectionProcessor; import org.apache.olingo.server.api.processor.ActionEntityProcessor; import org.apache.olingo.server.api.processor.ActionPrimitiveCollectionProcessor; import org.apache.olingo.server.api.processor.ActionPrimitiveProcessor; import org.apache.olingo.server.api.processor.ActionVoidProcessor; import org.apache.olingo.server.api.processor.BatchProcessor; import org.apache.olingo.server.api.processor.ComplexCollectionProcessor; import org.apache.olingo.server.api.processor.ComplexProcessor; import org.apache.olingo.server.api.processor.CountComplexCollectionProcessor; import org.apache.olingo.server.api.processor.CountEntityCollectionProcessor; import org.apache.olingo.server.api.processor.CountPrimitiveCollectionProcessor; import org.apache.olingo.server.api.processor.EntityCollectionProcessor; import org.apache.olingo.server.api.processor.EntityProcessor; import org.apache.olingo.server.api.processor.MediaEntityProcessor; import org.apache.olingo.server.api.processor.MetadataProcessor; import org.apache.olingo.server.api.processor.PrimitiveCollectionProcessor; import org.apache.olingo.server.api.processor.PrimitiveProcessor; import org.apache.olingo.server.api.processor.PrimitiveValueProcessor; import org.apache.olingo.server.api.processor.ReferenceCollectionProcessor; import org.apache.olingo.server.api.processor.ReferenceProcessor; import org.apache.olingo.server.api.processor.ServiceDocumentProcessor; import org.apache.olingo.server.api.serializer.RepresentationType; import org.apache.olingo.server.api.uri.UriInfo; import org.apache.olingo.server.api.uri.UriResource; import org.apache.olingo.server.api.uri.UriResourceAction; import org.apache.olingo.server.api.uri.UriResourceEntitySet; import org.apache.olingo.server.api.uri.UriResourceFunction; import org.apache.olingo.server.api.uri.UriResourceNavigation; import 
org.apache.olingo.server.api.uri.UriResourcePartTyped; import org.apache.olingo.server.api.uri.UriResourcePrimitiveProperty; import org.apache.olingo.server.api.uri.UriResourceProperty; import org.apache.olingo.server.api.uri.UriResourceSingleton; import org.apache.olingo.server.core.batchhandler.BatchHandler; import org.apache.olingo.server.core.etag.PreconditionsValidator; public class ODataDispatcher { private static final String NOT_IMPLEMENTED_MESSAGE = "not implemented"; private final UriInfo uriInfo; private final ODataHandlerImpl handler; private static final String RETURN_MINIMAL = "return=minimal"; private static final String RETURN_REPRESENTATION = "return=representation"; private static final String EDMSTREAM = "Edm.Stream"; public ODataDispatcher(final UriInfo uriInfo, final ODataHandlerImpl handler) { this.uriInfo = uriInfo; this.handler = handler; } public void dispatch(final ODataRequest request, final ODataResponse response) throws ODataApplicationException, ODataLibraryException { switch (uriInfo.getKind()) { case metadata: checkMethods(request.getMethod(), HttpMethod.GET, HttpMethod.HEAD); final ContentType requestedContentType = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), RepresentationType.METADATA); handler.selectProcessor(MetadataProcessor.class) .readMetadata(request, response, uriInfo, requestedContentType); break; case service: checkMethods(request.getMethod(), HttpMethod.GET, HttpMethod.HEAD); if ("".equals(request.getRawODataPath())) { handler.selectProcessor(RedirectProcessor.class) .redirect(request, response); } else { final ContentType serviceContentType = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), RepresentationType.SERVICE); handler.selectProcessor(ServiceDocumentProcessor.class) .readServiceDocument(request, response, uriInfo, serviceContentType); } break; case resource: case entityId: 
handleResourceDispatching(request, response); break; case batch: checkMethod(request.getMethod(), HttpMethod.POST); new BatchHandler(handler, handler.selectProcessor(BatchProcessor.class)) .process(request, response, true); break; default: throw new ODataHandlerException(NOT_IMPLEMENTED_MESSAGE, ODataHandlerException.MessageKeys.FUNCTIONALITY_NOT_IMPLEMENTED); } } private void handleResourceDispatching(final ODataRequest request, final ODataResponse response) throws ODataApplicationException, ODataLibraryException { final int lastPathSegmentIndex = uriInfo.getUriResourceParts().size() - 1; final UriResource lastPathSegment = uriInfo.getUriResourceParts().get(lastPathSegmentIndex); switch (lastPathSegment.getKind()) { case action: checkMethod(request.getMethod(), HttpMethod.POST); handleActionDispatching(request, response, (UriResourceAction) lastPathSegment); break; case function: checkMethod(request.getMethod(), HttpMethod.GET); handleFunctionDispatching(request, response, (UriResourceFunction) lastPathSegment); break; case entitySet: case navigationProperty: handleEntityDispatching(request, response, ((UriResourcePartTyped) lastPathSegment).isCollection(), isEntityOrNavigationMedia(lastPathSegment)); break; case singleton: handleSingleEntityDispatching(request, response, isSingletonMedia(lastPathSegment), true); break; case count: checkMethod(request.getMethod(), HttpMethod.GET); handleCountDispatching(request, response, lastPathSegmentIndex); break; case primitiveProperty: handlePrimitiveDispatching(request, response, ((UriResourceProperty) lastPathSegment).isCollection()); break; case complexProperty: handleComplexDispatching(request, response, ((UriResourceProperty) lastPathSegment).isCollection()); break; case value: handleValueDispatching(request, response, lastPathSegmentIndex); break; case ref: handleReferenceDispatching(request, response, lastPathSegmentIndex); break; default: throw new ODataHandlerException(NOT_IMPLEMENTED_MESSAGE, 
ODataHandlerException.MessageKeys.FUNCTIONALITY_NOT_IMPLEMENTED); } } private void handleFunctionDispatching(final ODataRequest request, final ODataResponse response, final UriResourceFunction uriResourceFunction) throws ODataApplicationException, ODataLibraryException { EdmFunction function = uriResourceFunction.getFunction(); if (function == null) { function = uriResourceFunction.getFunctionImport().getUnboundFunctions().get(0); } final EdmReturnType returnType = function.getReturnType(); switch (returnType.getType().getKind()) { case ENTITY: handleEntityDispatching(request, response, returnType.isCollection() && uriResourceFunction.getKeyPredicates().isEmpty(), false); break; case PRIMITIVE: handlePrimitiveDispatching(request, response, returnType.isCollection()); break; case COMPLEX: handleComplexDispatching(request, response, returnType.isCollection()); break; default: throw new ODataHandlerException(NOT_IMPLEMENTED_MESSAGE, ODataHandlerException.MessageKeys.FUNCTIONALITY_NOT_IMPLEMENTED); } } private void handleActionDispatching(final ODataRequest request, final ODataResponse response, final UriResourceAction uriResourceAction) throws ODataApplicationException, ODataLibraryException { final EdmAction action = uriResourceAction.getAction(); if (action.isBound()) { // Only bound actions can have ETag control for the binding parameter. 
validatePreconditions(request, false); } final ContentType requestFormat = getSupportedContentType(request.getHeader(HttpHeader.CONTENT_TYPE), RepresentationType.ACTION_PARAMETERS, false); final EdmReturnType returnType = action.getReturnType(); if (returnType == null) { handler.selectProcessor(ActionVoidProcessor.class) .processActionVoid(request, response, uriInfo, requestFormat); } else { final boolean isCollection = returnType.isCollection(); ContentType responseFormat; switch (returnType.getType().getKind()) { case ENTITY: responseFormat = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), isCollection ? RepresentationType.COLLECTION_ENTITY : RepresentationType.ENTITY); if (isCollection) { handler.selectProcessor(ActionEntityCollectionProcessor.class) .processActionEntityCollection(request, response, uriInfo, requestFormat, responseFormat); } else { handler.selectProcessor(ActionEntityProcessor.class) .processActionEntity(request, response, uriInfo, requestFormat, responseFormat); } break; case PRIMITIVE: responseFormat = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), isCollection ? RepresentationType.COLLECTION_PRIMITIVE : RepresentationType.PRIMITIVE); if (isCollection) { handler.selectProcessor(ActionPrimitiveCollectionProcessor.class) .processActionPrimitiveCollection(request, response, uriInfo, requestFormat, responseFormat); } else { handler.selectProcessor(ActionPrimitiveProcessor.class) .processActionPrimitive(request, response, uriInfo, requestFormat, responseFormat); } break; case COMPLEX: responseFormat = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), isCollection ? 
RepresentationType.COLLECTION_COMPLEX : RepresentationType.COMPLEX); if (isCollection) { handler.selectProcessor(ActionComplexCollectionProcessor.class) .processActionComplexCollection(request, response, uriInfo, requestFormat, responseFormat); } else { handler.selectProcessor(ActionComplexProcessor.class) .processActionComplex(request, response, uriInfo, requestFormat, responseFormat); } break; default: throw new ODataHandlerException(NOT_IMPLEMENTED_MESSAGE, ODataHandlerException.MessageKeys.FUNCTIONALITY_NOT_IMPLEMENTED); } } } private void handleReferenceDispatching(final ODataRequest request, final ODataResponse response, final int lastPathSegmentIndex) throws ODataApplicationException, ODataLibraryException { final HttpMethod httpMethod = request.getMethod(); final boolean isCollection = ((UriResourcePartTyped) uriInfo.getUriResourceParts() .get(lastPathSegmentIndex - 1)) .isCollection(); if (isCollection && httpMethod == HttpMethod.GET) { validatePreferHeader(request); final ContentType responseFormat = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), RepresentationType.COLLECTION_REFERENCE); handler.selectProcessor(ReferenceCollectionProcessor.class) .readReferenceCollection(request, response, uriInfo, responseFormat); } else if (isCollection && httpMethod == HttpMethod.POST) { final ContentType requestFormat = getSupportedContentType(request.getHeader(HttpHeader.CONTENT_TYPE), RepresentationType.REFERENCE, true); handler.selectProcessor(ReferenceProcessor.class) .createReference(request, response, uriInfo, requestFormat); } else if (!isCollection && httpMethod == HttpMethod.GET) { validatePreferHeader(request); final ContentType responseFormat = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), RepresentationType.REFERENCE); handler.selectProcessor(ReferenceProcessor.class).readReference(request, response, uriInfo, 
responseFormat); } else if (!isCollection && (httpMethod == HttpMethod.PUT || httpMethod == HttpMethod.PATCH)) { final ContentType requestFormat = getSupportedContentType(request.getHeader(HttpHeader.CONTENT_TYPE), RepresentationType.REFERENCE, true); handler.selectProcessor(ReferenceProcessor.class) .updateReference(request, response, uriInfo, requestFormat); } else if (httpMethod == HttpMethod.DELETE) { validatePreferHeader(request); handler.selectProcessor(ReferenceProcessor.class) .deleteReference(request, response, uriInfo); } else { throwMethodNotAllowed(httpMethod); } } private void handleValueDispatching(final ODataRequest request, final ODataResponse response, final int lastPathSegmentIndex) throws ODataApplicationException, ODataLibraryException { // The URI Parser already checked if $value is allowed here so we only have to dispatch to the correct processor final UriResource resource = uriInfo.getUriResourceParts().get(lastPathSegmentIndex - 1); if (resource instanceof UriResourceProperty || resource instanceof UriResourceFunction && ((UriResourceFunction) resource).getType().getKind() == EdmTypeKind.PRIMITIVE) { handlePrimitiveValueDispatching(request, response, resource); } else { handleMediaValueDispatching(request, response, resource); } } private void handleMediaValueDispatching(final ODataRequest request, final ODataResponse response, final UriResource resource) throws ContentNegotiatorException, ODataApplicationException, ODataLibraryException, ODataHandlerException, PreconditionException { final HttpMethod method = request.getMethod(); validatePreferHeader(request); if (method == HttpMethod.GET) { // This can be a GET on an EntitySet, Navigation or Function final ContentType requestedContentType = ContentNegotiator. 
doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), RepresentationType.MEDIA); handler.selectProcessor(MediaEntityProcessor.class) .readMediaEntity(request, response, uriInfo, requestedContentType); // PUT and DELETE can only be called on EntitySets or Navigation properties which are media resources } else if (method == HttpMethod.PUT && (isEntityOrNavigationMedia(resource) || isSingletonMedia(resource))) { validatePreconditions(request, true); final ContentType requestFormat = ContentType.parse(request.getHeader(HttpHeader.CONTENT_TYPE)); final ContentType responseFormat = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), RepresentationType.ENTITY); handler.selectProcessor(MediaEntityProcessor.class) .updateMediaEntity(request, response, uriInfo, requestFormat, responseFormat); } else if (method == HttpMethod.DELETE && isEntityOrNavigationMedia(resource)) { validatePreconditions(request, true); handler.selectProcessor(MediaEntityProcessor.class) .deleteMediaEntity(request, response, uriInfo); } else { throwMethodNotAllowed(method); } } private void handlePrimitiveValueDispatching(final ODataRequest request, final ODataResponse response, final UriResource resource) throws ContentNegotiatorException, ODataApplicationException, ODataLibraryException, ODataHandlerException, PreconditionException { final HttpMethod method = request.getMethod(); final EdmType type = resource instanceof UriResourceProperty ? ((UriResourceProperty) resource).getType() : ((UriResourceFunction) resource).getType(); final RepresentationType valueRepresentationType = type == EdmPrimitiveTypeFactory.getInstance(EdmPrimitiveTypeKind.Binary) ? RepresentationType.BINARY : RepresentationType.VALUE; if (method == HttpMethod.GET) { validatePreferHeader(request); final ContentType requestedContentType = ContentNegotiator. 
doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), valueRepresentationType); handler.selectProcessor(PrimitiveValueProcessor.class) .readPrimitiveValue(request, response, uriInfo, requestedContentType); } else if (method == HttpMethod.PUT && resource instanceof UriResourceProperty) { validatePreconditions(request, false); final ContentType requestFormat = getSupportedContentType(request.getHeader(HttpHeader.CONTENT_TYPE), valueRepresentationType, true); final ContentType responseFormat = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), valueRepresentationType); handler.selectProcessor(PrimitiveValueProcessor.class) .updatePrimitiveValue(request, response, uriInfo, requestFormat, responseFormat); } else if (method == HttpMethod.DELETE && resource instanceof UriResourceProperty) { validatePreferHeader(request); validatePreconditions(request, false); handler.selectProcessor(PrimitiveValueProcessor.class) .deletePrimitiveValue(request, response, uriInfo); } else { throwMethodNotAllowed(method); } } private void handleComplexDispatching(final ODataRequest request, final ODataResponse response, final boolean isCollection) throws ODataApplicationException, ODataLibraryException { final HttpMethod method = request.getMethod(); final RepresentationType complexRepresentationType = isCollection ? 
RepresentationType.COLLECTION_COMPLEX : RepresentationType.COMPLEX; if (method == HttpMethod.GET) { validatePreferHeader(request); final ContentType requestedContentType = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), complexRepresentationType); if (isCollection) { handler.selectProcessor(ComplexCollectionProcessor.class) .readComplexCollection(request, response, uriInfo, requestedContentType); } else { handler.selectProcessor(ComplexProcessor.class) .readComplex(request, response, uriInfo, requestedContentType); } } else if (method == HttpMethod.PUT || method == HttpMethod.PATCH || ((method == HttpMethod.POST) && isCollection)) { validatePreconditions(request, false); final ContentType requestFormat = getSupportedContentType(request.getHeader(HttpHeader.CONTENT_TYPE), complexRepresentationType, true); final ContentType responseFormat = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), complexRepresentationType); if (isCollection) { handler.selectProcessor(ComplexCollectionProcessor.class) .updateComplexCollection(request, response, uriInfo, requestFormat, responseFormat); } else { handler.selectProcessor(ComplexProcessor.class) .updateComplex(request, response, uriInfo, requestFormat, responseFormat); } } else if (method == HttpMethod.DELETE) { validatePreferHeader(request); validatePreconditions(request, false); if (isCollection) { handler.selectProcessor(ComplexCollectionProcessor.class) .deleteComplexCollection(request, response, uriInfo); } else { handler.selectProcessor(ComplexProcessor.class) .deleteComplex(request, response, uriInfo); } } else { throwMethodNotAllowed(method); } } private void handlePrimitiveDispatching(final ODataRequest request, final ODataResponse response, final boolean isCollection) throws ODataApplicationException, ODataLibraryException { final HttpMethod method = request.getMethod(); final 
RepresentationType representationType = isCollection ? RepresentationType.COLLECTION_PRIMITIVE : RepresentationType.PRIMITIVE; if (method == HttpMethod.GET) { validatePreferHeader(request); final ContentType requestedContentType = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), representationType); if (isCollection) { handler.selectProcessor(PrimitiveCollectionProcessor.class) .readPrimitiveCollection(request, response, uriInfo, requestedContentType); } else { handler.selectProcessor(PrimitiveProcessor.class) .readPrimitive(request, response, uriInfo, requestedContentType); } } else if (method == HttpMethod.PUT || method == HttpMethod.PATCH || ((method == HttpMethod.POST) && isCollection)) { validatePreconditions(request, false); ContentType requestFormat = null; List<UriResource> uriResources = uriInfo.getUriResourceParts(); UriResource uriResource = uriResources.get(uriResources.size() - 1); if (uriResource instanceof UriResourcePrimitiveProperty && ((UriResourcePrimitiveProperty)uriResource).getType() .getFullQualifiedName().getFullQualifiedNameAsString().equalsIgnoreCase(EDMSTREAM)) { requestFormat = ContentType.parse(request.getHeader(HttpHeader.CONTENT_TYPE)); } else { requestFormat = getSupportedContentType(request.getHeader(HttpHeader.CONTENT_TYPE), representationType, true); } final ContentType responseFormat = ContentNegotiator.doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), representationType); if (isCollection) { handler.selectProcessor(PrimitiveCollectionProcessor.class) .updatePrimitiveCollection(request, response, uriInfo, requestFormat, responseFormat); } else { handler.selectProcessor(PrimitiveProcessor.class) .updatePrimitive(request, response, uriInfo, requestFormat, responseFormat); } } else if (method == HttpMethod.DELETE) { validatePreferHeader(request); validatePreconditions(request, false); if (isCollection) { 
handler.selectProcessor(PrimitiveCollectionProcessor.class) .deletePrimitiveCollection(request, response, uriInfo); } else { handler.selectProcessor(PrimitiveProcessor.class) .deletePrimitive(request, response, uriInfo); } } else { throwMethodNotAllowed(method); } } private void handleCountDispatching(final ODataRequest request, final ODataResponse response, final int lastPathSegmentIndex) throws ODataApplicationException, ODataLibraryException { validatePreferHeader(request); final UriResource resource = uriInfo.getUriResourceParts().get(lastPathSegmentIndex - 1); if (resource instanceof UriResourceEntitySet || resource instanceof UriResourceNavigation || resource instanceof UriResourceFunction && ((UriResourceFunction) resource).getType().getKind() == EdmTypeKind.ENTITY) { handler.selectProcessor(CountEntityCollectionProcessor.class) .countEntityCollection(request, response, uriInfo); } else if (resource instanceof UriResourcePrimitiveProperty || resource instanceof UriResourceFunction && ((UriResourceFunction) resource).getType().getKind() == EdmTypeKind.PRIMITIVE) { handler.selectProcessor(CountPrimitiveCollectionProcessor.class) .countPrimitiveCollection(request, response, uriInfo); } else { handler.selectProcessor(CountComplexCollectionProcessor.class) .countComplexCollection(request, response, uriInfo); } } private void handleEntityDispatching(final ODataRequest request, final ODataResponse response, final boolean isCollection, final boolean isMedia) throws ODataApplicationException, ODataLibraryException { if (isCollection) { handleEntityCollectionDispatching(request, response, isMedia); } else { handleSingleEntityDispatching(request, response, isMedia, false); } } private void handleEntityCollectionDispatching(final ODataRequest request, final ODataResponse response, final boolean isMedia ) throws ContentNegotiatorException, ODataApplicationException, ODataLibraryException, ODataHandlerException { final HttpMethod method = request.getMethod(); if (method 
== HttpMethod.GET) { validatePreferHeader(request); final ContentType requestedContentType = ContentNegotiator. doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), RepresentationType.COLLECTION_ENTITY); handler.selectProcessor(EntityCollectionProcessor.class) .readEntityCollection(request, response, uriInfo, requestedContentType); } else if (method == HttpMethod.POST) { final ContentType responseFormat = ContentNegotiator. doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), RepresentationType.ENTITY); if (isMedia) { validatePreferHeader(request); final ContentType requestFormat = ContentType.parse( request.getHeader(HttpHeader.CONTENT_TYPE)); handler.selectProcessor(MediaEntityProcessor.class) .createMediaEntity(request, response, uriInfo, requestFormat, responseFormat); } else { try { final ContentType requestFormat = (request.getHeader(HttpHeader.CONTENT_TYPE) == null && (request.getBody() == null || request.getBody().available() == 0)) ? getSupportedContentType( request.getHeader(HttpHeader.CONTENT_TYPE), RepresentationType.ENTITY, false) : getSupportedContentType( request.getHeader(HttpHeader.CONTENT_TYPE), RepresentationType.ENTITY, true); handler.selectProcessor(EntityProcessor.class) .createEntity(request, response, uriInfo, requestFormat, responseFormat); } catch (IOException e) { throw new ODataHandlerException("There is problem in the payload.", ODataHandlerException.MessageKeys.INVALID_PAYLOAD); } } } else if (method == HttpMethod.PUT && uriInfo.getUriResourceParts().size()==2) { if (isMedia) { validatePreferHeader(request); } validatePreconditions(request, false); final ContentType requestFormat = getSupportedContentType( request.getHeader(HttpHeader.CONTENT_TYPE), RepresentationType.ENTITY, true); final ContentType responseFormat = ContentNegotiator. 
doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), RepresentationType.ENTITY); handler.selectProcessor(EntityProcessor.class) .updateEntity(request, response, uriInfo, requestFormat, responseFormat); } else { throwMethodNotAllowed(method); } } /**Checks if Prefer header is set with return=minimal or * return=representation for GET and DELETE requests * @param request * @throws ODataHandlerException */ private void validatePreferHeader(final ODataRequest request) throws ODataHandlerException { final List<String> returnPreference = request.getHeaders(HttpHeader.PREFER); if (null != returnPreference) { for (String preference : returnPreference) { if (preference.equals(RETURN_MINIMAL) || preference.equals(RETURN_REPRESENTATION)) { throw new ODataHandlerException("Prefer Header not supported: " + preference, ODataHandlerException.MessageKeys.INVALID_PREFER_HEADER, preference); } } } } private boolean isSingletonMedia(final UriResource pathSegment) { return pathSegment instanceof UriResourceSingleton && ((UriResourceSingleton) pathSegment).getEntityType().hasStream(); } private void handleSingleEntityDispatching(final ODataRequest request, final ODataResponse response, final boolean isMedia, final boolean isSingleton) throws ContentNegotiatorException, ODataApplicationException, ODataLibraryException, ODataHandlerException, PreconditionException { final HttpMethod method = request.getMethod(); if (method == HttpMethod.GET) { validatePreferHeader(request); final ContentType requestedContentType = ContentNegotiator. 
doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), RepresentationType.ENTITY); handler.selectProcessor(EntityProcessor.class) .readEntity(request, response, uriInfo, requestedContentType); } else if (method == HttpMethod.PUT || method == HttpMethod.PATCH) { if (isMedia) { validatePreferHeader(request); } validatePreconditions(request, false); final ContentType requestFormat = getSupportedContentType( request.getHeader(HttpHeader.CONTENT_TYPE), RepresentationType.ENTITY, true); final ContentType responseFormat = ContentNegotiator. doContentNegotiation(uriInfo.getFormatOption(), request, handler.getCustomContentTypeSupport(), RepresentationType.ENTITY); handler.selectProcessor(EntityProcessor.class) .updateEntity(request, response, uriInfo, requestFormat, responseFormat); } else if (method == HttpMethod.DELETE && !isSingleton) { validateIsSingleton(method); validatePreconditions(request, false); validatePreferHeader(request); if (isMedia) { ((MediaEntityProcessor) handler.selectProcessor(MediaEntityProcessor.class)) .deleteEntity(request, response, uriInfo); } else { ((EntityProcessor) handler.selectProcessor(EntityProcessor.class)) .deleteEntity(request, response, uriInfo); } } else { throwMethodNotAllowed(method); } } /*Delete method is not allowed for Entities navigating to Singleton*/ private void validateIsSingleton(HttpMethod method) throws ODataHandlerException { final int lastPathSegmentIndex = uriInfo.getUriResourceParts().size() - 1; final UriResource pathSegment = uriInfo.getUriResourceParts().get(lastPathSegmentIndex); if (pathSegment instanceof UriResourceNavigation && uriInfo.getUriResourceParts().get(lastPathSegmentIndex - 1) instanceof UriResourceEntitySet && ((UriResourceEntitySet) uriInfo.getUriResourceParts().get(lastPathSegmentIndex - 1)).getEntitySet() .getRelatedBindingTarget( pathSegment.getSegmentValue()) instanceof EdmSingleton) { throwMethodNotAllowed(method); } } private void 
validatePreconditions(final ODataRequest request, final boolean isMediaValue) throws PreconditionException { // If needed perform preconditions validation. final CustomETagSupport eTagSupport = handler.getCustomETagSupport(); if (eTagSupport != null && new PreconditionsValidator(uriInfo).mustValidatePreconditions(eTagSupport, isMediaValue) && request.getHeader(HttpHeader.IF_MATCH) == null && request.getHeader(HttpHeader.IF_NONE_MATCH) == null) { throw new PreconditionException("Expected an if-match or if-none-match header.", PreconditionException.MessageKeys.MISSING_HEADER); } } private void checkMethod(final HttpMethod requestMethod, final HttpMethod allowedMethod) throws ODataHandlerException { if (requestMethod != allowedMethod) { throwMethodNotAllowed(requestMethod); } } private void checkMethods(final HttpMethod requestMethod, final HttpMethod... allowedMethods) throws ODataHandlerException { //Check if the request method is one of the allowed ones for (int i = 0; i < allowedMethods.length; i++) { if (requestMethod == allowedMethods[i]) { return; } } //request method does not match any allowed method throwMethodNotAllowed(requestMethod); } private void throwMethodNotAllowed(final HttpMethod httpMethod) throws ODataHandlerException { throw new ODataHandlerException("HTTP method " + httpMethod + " is not allowed.", ODataHandlerException.MessageKeys.HTTP_METHOD_NOT_ALLOWED, httpMethod.toString()); } private ContentType getSupportedContentType(final String contentTypeHeader, final RepresentationType representationType, final boolean mustNotBeNull) throws ODataHandlerException, ContentNegotiatorException { if (contentTypeHeader == null) { if (mustNotBeNull) { throw new ODataHandlerException("ContentTypeHeader parameter is null", ODataHandlerException.MessageKeys.MISSING_CONTENT_TYPE); } return ContentType.APPLICATION_JSON; } ContentType contentType; try { contentType = ContentType.create(contentTypeHeader); } catch (final IllegalArgumentException e) { throw new 
ODataHandlerException("Illegal content type.", e, ODataHandlerException.MessageKeys.INVALID_CONTENT_TYPE, contentTypeHeader); } ContentNegotiator.checkSupport(contentType, handler.getCustomContentTypeSupport(), representationType); return contentType; } private boolean isEntityOrNavigationMedia(final UriResource pathSegment) { // This method MUST NOT check if the resource is of type function since these are handled differently return pathSegment instanceof UriResourceEntitySet && ((UriResourceEntitySet) pathSegment).getEntityType().hasStream() || pathSegment instanceof UriResourceNavigation && ((EdmEntityType) ((UriResourceNavigation) pathSegment).getType()).hasStream(); } }
googleapis/google-cloud-java
35,977
java-apihub/proto-google-cloud-apihub-v1/src/main/java/com/google/cloud/apihub/v1/UpdateApiOperationRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/apihub/v1/apihub_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.apihub.v1; /** * * * <pre> * The [UpdateApiOperation][google.cloud.apihub.v1.ApiHub.UpdateApiOperation] * method's request. * </pre> * * Protobuf type {@code google.cloud.apihub.v1.UpdateApiOperationRequest} */ public final class UpdateApiOperationRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.apihub.v1.UpdateApiOperationRequest) UpdateApiOperationRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateApiOperationRequest.newBuilder() to construct. 
private UpdateApiOperationRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateApiOperationRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateApiOperationRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_UpdateApiOperationRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_UpdateApiOperationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apihub.v1.UpdateApiOperationRequest.class, com.google.cloud.apihub.v1.UpdateApiOperationRequest.Builder.class); } private int bitField0_; public static final int API_OPERATION_FIELD_NUMBER = 1; private com.google.cloud.apihub.v1.ApiOperation apiOperation_; /** * * * <pre> * Required. The apiOperation resource to update. * * The operation resource's `name` field is used to identify the operation * resource to update. * Format: * `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}` * </pre> * * <code> * .google.cloud.apihub.v1.ApiOperation api_operation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the apiOperation field is set. */ @java.lang.Override public boolean hasApiOperation() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The apiOperation resource to update. * * The operation resource's `name` field is used to identify the operation * resource to update. 
* Format: * `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}` * </pre> * * <code> * .google.cloud.apihub.v1.ApiOperation api_operation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The apiOperation. */ @java.lang.Override public com.google.cloud.apihub.v1.ApiOperation getApiOperation() { return apiOperation_ == null ? com.google.cloud.apihub.v1.ApiOperation.getDefaultInstance() : apiOperation_; } /** * * * <pre> * Required. The apiOperation resource to update. * * The operation resource's `name` field is used to identify the operation * resource to update. * Format: * `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}` * </pre> * * <code> * .google.cloud.apihub.v1.ApiOperation api_operation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.apihub.v1.ApiOperationOrBuilder getApiOperationOrBuilder() { return apiOperation_ == null ? com.google.cloud.apihub.v1.ApiOperation.getDefaultInstance() : apiOperation_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The list of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getApiOperation()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getApiOperation()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.apihub.v1.UpdateApiOperationRequest)) { return super.equals(obj); } com.google.cloud.apihub.v1.UpdateApiOperationRequest other = (com.google.cloud.apihub.v1.UpdateApiOperationRequest) obj; if (hasApiOperation() != other.hasApiOperation()) return false; if (hasApiOperation()) { if (!getApiOperation().equals(other.getApiOperation())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasApiOperation()) { hash = (37 * hash) + API_OPERATION_FIELD_NUMBER; hash = (53 * hash) + getApiOperation().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.apihub.v1.UpdateApiOperationRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder 
toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The [UpdateApiOperation][google.cloud.apihub.v1.ApiHub.UpdateApiOperation] * method's request. * </pre> * * Protobuf type {@code google.cloud.apihub.v1.UpdateApiOperationRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.apihub.v1.UpdateApiOperationRequest) com.google.cloud.apihub.v1.UpdateApiOperationRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_UpdateApiOperationRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_UpdateApiOperationRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.apihub.v1.UpdateApiOperationRequest.class, com.google.cloud.apihub.v1.UpdateApiOperationRequest.Builder.class); } // Construct using com.google.cloud.apihub.v1.UpdateApiOperationRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getApiOperationFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; apiOperation_ = null; if (apiOperationBuilder_ != null) { 
apiOperationBuilder_.dispose(); apiOperationBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.apihub.v1.ApiHubServiceProto .internal_static_google_cloud_apihub_v1_UpdateApiOperationRequest_descriptor; } @java.lang.Override public com.google.cloud.apihub.v1.UpdateApiOperationRequest getDefaultInstanceForType() { return com.google.cloud.apihub.v1.UpdateApiOperationRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.apihub.v1.UpdateApiOperationRequest build() { com.google.cloud.apihub.v1.UpdateApiOperationRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.apihub.v1.UpdateApiOperationRequest buildPartial() { com.google.cloud.apihub.v1.UpdateApiOperationRequest result = new com.google.cloud.apihub.v1.UpdateApiOperationRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.apihub.v1.UpdateApiOperationRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.apiOperation_ = apiOperationBuilder_ == null ? apiOperation_ : apiOperationBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.apihub.v1.UpdateApiOperationRequest) { return mergeFrom((com.google.cloud.apihub.v1.UpdateApiOperationRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.apihub.v1.UpdateApiOperationRequest other) { if (other == com.google.cloud.apihub.v1.UpdateApiOperationRequest.getDefaultInstance()) return this; if (other.hasApiOperation()) { mergeApiOperation(other.getApiOperation()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new 
java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getApiOperationFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.apihub.v1.ApiOperation apiOperation_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.apihub.v1.ApiOperation, com.google.cloud.apihub.v1.ApiOperation.Builder, com.google.cloud.apihub.v1.ApiOperationOrBuilder> apiOperationBuilder_; /** * * * <pre> * Required. The apiOperation resource to update. * * The operation resource's `name` field is used to identify the operation * resource to update. * Format: * `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}` * </pre> * * <code> * .google.cloud.apihub.v1.ApiOperation api_operation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the apiOperation field is set. */ public boolean hasApiOperation() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The apiOperation resource to update. * * The operation resource's `name` field is used to identify the operation * resource to update. 
* Format: * `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}` * </pre> * * <code> * .google.cloud.apihub.v1.ApiOperation api_operation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The apiOperation. */ public com.google.cloud.apihub.v1.ApiOperation getApiOperation() { if (apiOperationBuilder_ == null) { return apiOperation_ == null ? com.google.cloud.apihub.v1.ApiOperation.getDefaultInstance() : apiOperation_; } else { return apiOperationBuilder_.getMessage(); } } /** * * * <pre> * Required. The apiOperation resource to update. * * The operation resource's `name` field is used to identify the operation * resource to update. * Format: * `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}` * </pre> * * <code> * .google.cloud.apihub.v1.ApiOperation api_operation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setApiOperation(com.google.cloud.apihub.v1.ApiOperation value) { if (apiOperationBuilder_ == null) { if (value == null) { throw new NullPointerException(); } apiOperation_ = value; } else { apiOperationBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The apiOperation resource to update. * * The operation resource's `name` field is used to identify the operation * resource to update. * Format: * `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}` * </pre> * * <code> * .google.cloud.apihub.v1.ApiOperation api_operation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setApiOperation( com.google.cloud.apihub.v1.ApiOperation.Builder builderForValue) { if (apiOperationBuilder_ == null) { apiOperation_ = builderForValue.build(); } else { apiOperationBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. 
The apiOperation resource to update. * * The operation resource's `name` field is used to identify the operation * resource to update. * Format: * `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}` * </pre> * * <code> * .google.cloud.apihub.v1.ApiOperation api_operation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeApiOperation(com.google.cloud.apihub.v1.ApiOperation value) { if (apiOperationBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && apiOperation_ != null && apiOperation_ != com.google.cloud.apihub.v1.ApiOperation.getDefaultInstance()) { getApiOperationBuilder().mergeFrom(value); } else { apiOperation_ = value; } } else { apiOperationBuilder_.mergeFrom(value); } if (apiOperation_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The apiOperation resource to update. * * The operation resource's `name` field is used to identify the operation * resource to update. * Format: * `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}` * </pre> * * <code> * .google.cloud.apihub.v1.ApiOperation api_operation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearApiOperation() { bitField0_ = (bitField0_ & ~0x00000001); apiOperation_ = null; if (apiOperationBuilder_ != null) { apiOperationBuilder_.dispose(); apiOperationBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The apiOperation resource to update. * * The operation resource's `name` field is used to identify the operation * resource to update. 
* Format: * `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}` * </pre> * * <code> * .google.cloud.apihub.v1.ApiOperation api_operation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.apihub.v1.ApiOperation.Builder getApiOperationBuilder() { bitField0_ |= 0x00000001; onChanged(); return getApiOperationFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The apiOperation resource to update. * * The operation resource's `name` field is used to identify the operation * resource to update. * Format: * `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}` * </pre> * * <code> * .google.cloud.apihub.v1.ApiOperation api_operation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.apihub.v1.ApiOperationOrBuilder getApiOperationOrBuilder() { if (apiOperationBuilder_ != null) { return apiOperationBuilder_.getMessageOrBuilder(); } else { return apiOperation_ == null ? com.google.cloud.apihub.v1.ApiOperation.getDefaultInstance() : apiOperation_; } } /** * * * <pre> * Required. The apiOperation resource to update. * * The operation resource's `name` field is used to identify the operation * resource to update. 
* Format: * `projects/{project}/locations/{location}/apis/{api}/versions/{version}/operations/{operation}` * </pre> * * <code> * .google.cloud.apihub.v1.ApiOperation api_operation = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.apihub.v1.ApiOperation, com.google.cloud.apihub.v1.ApiOperation.Builder, com.google.cloud.apihub.v1.ApiOperationOrBuilder> getApiOperationFieldBuilder() { if (apiOperationBuilder_ == null) { apiOperationBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.apihub.v1.ApiOperation, com.google.cloud.apihub.v1.ApiOperation.Builder, com.google.cloud.apihub.v1.ApiOperationOrBuilder>( getApiOperation(), getParentForChildren(), isClean()); apiOperation_ = null; } return apiOperationBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The list of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The list of fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The list of fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.apihub.v1.UpdateApiOperationRequest) } // @@protoc_insertion_point(class_scope:google.cloud.apihub.v1.UpdateApiOperationRequest) private static final com.google.cloud.apihub.v1.UpdateApiOperationRequest 
DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.apihub.v1.UpdateApiOperationRequest(); } public static com.google.cloud.apihub.v1.UpdateApiOperationRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateApiOperationRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateApiOperationRequest>() { @java.lang.Override public UpdateApiOperationRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateApiOperationRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateApiOperationRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.apihub.v1.UpdateApiOperationRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,933
java-document-ai/proto-google-cloud-document-ai-v1/src/main/java/com/google/cloud/documentai/v1/BatchDocumentsInputConfig.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/documentai/v1/document_io.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.documentai.v1; /** * * * <pre> * The common config to specify a set of documents used as input. * </pre> * * Protobuf type {@code google.cloud.documentai.v1.BatchDocumentsInputConfig} */ public final class BatchDocumentsInputConfig extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.documentai.v1.BatchDocumentsInputConfig) BatchDocumentsInputConfigOrBuilder { private static final long serialVersionUID = 0L; // Use BatchDocumentsInputConfig.newBuilder() to construct. 
private BatchDocumentsInputConfig(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BatchDocumentsInputConfig() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BatchDocumentsInputConfig(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1.DocumentIoProto .internal_static_google_cloud_documentai_v1_BatchDocumentsInputConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1.DocumentIoProto .internal_static_google_cloud_documentai_v1_BatchDocumentsInputConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1.BatchDocumentsInputConfig.class, com.google.cloud.documentai.v1.BatchDocumentsInputConfig.Builder.class); } private int sourceCase_ = 0; @SuppressWarnings("serial") private java.lang.Object source_; public enum SourceCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { GCS_PREFIX(1), GCS_DOCUMENTS(2), SOURCE_NOT_SET(0); private final int value; private SourceCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static SourceCase valueOf(int value) { return forNumber(value); } public static SourceCase forNumber(int value) { switch (value) { case 1: return GCS_PREFIX; case 2: return GCS_DOCUMENTS; case 0: return SOURCE_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public static final int GCS_PREFIX_FIELD_NUMBER = 1; /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1.GcsPrefix gcs_prefix = 1;</code> * * @return Whether the gcsPrefix field is set. */ @java.lang.Override public boolean hasGcsPrefix() { return sourceCase_ == 1; } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1.GcsPrefix gcs_prefix = 1;</code> * * @return The gcsPrefix. */ @java.lang.Override public com.google.cloud.documentai.v1.GcsPrefix getGcsPrefix() { if (sourceCase_ == 1) { return (com.google.cloud.documentai.v1.GcsPrefix) source_; } return com.google.cloud.documentai.v1.GcsPrefix.getDefaultInstance(); } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1.GcsPrefix gcs_prefix = 1;</code> */ @java.lang.Override public com.google.cloud.documentai.v1.GcsPrefixOrBuilder getGcsPrefixOrBuilder() { if (sourceCase_ == 1) { return (com.google.cloud.documentai.v1.GcsPrefix) source_; } return com.google.cloud.documentai.v1.GcsPrefix.getDefaultInstance(); } public static final int GCS_DOCUMENTS_FIELD_NUMBER = 2; /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1.GcsDocuments gcs_documents = 2;</code> * * @return Whether the gcsDocuments field is set. 
*/ @java.lang.Override public boolean hasGcsDocuments() { return sourceCase_ == 2; } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1.GcsDocuments gcs_documents = 2;</code> * * @return The gcsDocuments. */ @java.lang.Override public com.google.cloud.documentai.v1.GcsDocuments getGcsDocuments() { if (sourceCase_ == 2) { return (com.google.cloud.documentai.v1.GcsDocuments) source_; } return com.google.cloud.documentai.v1.GcsDocuments.getDefaultInstance(); } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1.GcsDocuments gcs_documents = 2;</code> */ @java.lang.Override public com.google.cloud.documentai.v1.GcsDocumentsOrBuilder getGcsDocumentsOrBuilder() { if (sourceCase_ == 2) { return (com.google.cloud.documentai.v1.GcsDocuments) source_; } return com.google.cloud.documentai.v1.GcsDocuments.getDefaultInstance(); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (sourceCase_ == 1) { output.writeMessage(1, (com.google.cloud.documentai.v1.GcsPrefix) source_); } if (sourceCase_ == 2) { output.writeMessage(2, (com.google.cloud.documentai.v1.GcsDocuments) source_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (sourceCase_ == 1) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 1, (com.google.cloud.documentai.v1.GcsPrefix) source_); } if (sourceCase_ == 2) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 2, 
(com.google.cloud.documentai.v1.GcsDocuments) source_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.documentai.v1.BatchDocumentsInputConfig)) { return super.equals(obj); } com.google.cloud.documentai.v1.BatchDocumentsInputConfig other = (com.google.cloud.documentai.v1.BatchDocumentsInputConfig) obj; if (!getSourceCase().equals(other.getSourceCase())) return false; switch (sourceCase_) { case 1: if (!getGcsPrefix().equals(other.getGcsPrefix())) return false; break; case 2: if (!getGcsDocuments().equals(other.getGcsDocuments())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); switch (sourceCase_) { case 1: hash = (37 * hash) + GCS_PREFIX_FIELD_NUMBER; hash = (53 * hash) + getGcsPrefix().hashCode(); break; case 2: hash = (37 * hash) + GCS_DOCUMENTS_FIELD_NUMBER; hash = (53 * hash) + getGcsDocuments().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig parseFrom( com.google.protobuf.ByteString data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig 
parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.documentai.v1.BatchDocumentsInputConfig prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The common config to specify a set of documents used as input. 
* </pre> * * Protobuf type {@code google.cloud.documentai.v1.BatchDocumentsInputConfig} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.documentai.v1.BatchDocumentsInputConfig) com.google.cloud.documentai.v1.BatchDocumentsInputConfigOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.documentai.v1.DocumentIoProto .internal_static_google_cloud_documentai_v1_BatchDocumentsInputConfig_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.documentai.v1.DocumentIoProto .internal_static_google_cloud_documentai_v1_BatchDocumentsInputConfig_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.documentai.v1.BatchDocumentsInputConfig.class, com.google.cloud.documentai.v1.BatchDocumentsInputConfig.Builder.class); } // Construct using com.google.cloud.documentai.v1.BatchDocumentsInputConfig.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (gcsPrefixBuilder_ != null) { gcsPrefixBuilder_.clear(); } if (gcsDocumentsBuilder_ != null) { gcsDocumentsBuilder_.clear(); } sourceCase_ = 0; source_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.documentai.v1.DocumentIoProto .internal_static_google_cloud_documentai_v1_BatchDocumentsInputConfig_descriptor; } @java.lang.Override public com.google.cloud.documentai.v1.BatchDocumentsInputConfig getDefaultInstanceForType() { return com.google.cloud.documentai.v1.BatchDocumentsInputConfig.getDefaultInstance(); } @java.lang.Override public 
com.google.cloud.documentai.v1.BatchDocumentsInputConfig build() { com.google.cloud.documentai.v1.BatchDocumentsInputConfig result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.documentai.v1.BatchDocumentsInputConfig buildPartial() { com.google.cloud.documentai.v1.BatchDocumentsInputConfig result = new com.google.cloud.documentai.v1.BatchDocumentsInputConfig(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0(com.google.cloud.documentai.v1.BatchDocumentsInputConfig result) { int from_bitField0_ = bitField0_; } private void buildPartialOneofs( com.google.cloud.documentai.v1.BatchDocumentsInputConfig result) { result.sourceCase_ = sourceCase_; result.source_ = this.source_; if (sourceCase_ == 1 && gcsPrefixBuilder_ != null) { result.source_ = gcsPrefixBuilder_.build(); } if (sourceCase_ == 2 && gcsDocumentsBuilder_ != null) { result.source_ = gcsDocumentsBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override 
public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.documentai.v1.BatchDocumentsInputConfig) { return mergeFrom((com.google.cloud.documentai.v1.BatchDocumentsInputConfig) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.documentai.v1.BatchDocumentsInputConfig other) { if (other == com.google.cloud.documentai.v1.BatchDocumentsInputConfig.getDefaultInstance()) return this; switch (other.getSourceCase()) { case GCS_PREFIX: { mergeGcsPrefix(other.getGcsPrefix()); break; } case GCS_DOCUMENTS: { mergeGcsDocuments(other.getGcsDocuments()); break; } case SOURCE_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getGcsPrefixFieldBuilder().getBuilder(), extensionRegistry); sourceCase_ = 1; break; } // case 10 case 18: { input.readMessage(getGcsDocumentsFieldBuilder().getBuilder(), extensionRegistry); sourceCase_ = 2; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int sourceCase_ = 0; private java.lang.Object source_; public SourceCase getSourceCase() { return SourceCase.forNumber(sourceCase_); } public Builder clearSource() { sourceCase_ = 0; source_ = null; 
onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1.GcsPrefix, com.google.cloud.documentai.v1.GcsPrefix.Builder, com.google.cloud.documentai.v1.GcsPrefixOrBuilder> gcsPrefixBuilder_; /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1.GcsPrefix gcs_prefix = 1;</code> * * @return Whether the gcsPrefix field is set. */ @java.lang.Override public boolean hasGcsPrefix() { return sourceCase_ == 1; } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1.GcsPrefix gcs_prefix = 1;</code> * * @return The gcsPrefix. */ @java.lang.Override public com.google.cloud.documentai.v1.GcsPrefix getGcsPrefix() { if (gcsPrefixBuilder_ == null) { if (sourceCase_ == 1) { return (com.google.cloud.documentai.v1.GcsPrefix) source_; } return com.google.cloud.documentai.v1.GcsPrefix.getDefaultInstance(); } else { if (sourceCase_ == 1) { return gcsPrefixBuilder_.getMessage(); } return com.google.cloud.documentai.v1.GcsPrefix.getDefaultInstance(); } } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1.GcsPrefix gcs_prefix = 1;</code> */ public Builder setGcsPrefix(com.google.cloud.documentai.v1.GcsPrefix value) { if (gcsPrefixBuilder_ == null) { if (value == null) { throw new NullPointerException(); } source_ = value; onChanged(); } else { gcsPrefixBuilder_.setMessage(value); } sourceCase_ = 1; return this; } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. 
* </pre> * * <code>.google.cloud.documentai.v1.GcsPrefix gcs_prefix = 1;</code> */ public Builder setGcsPrefix(com.google.cloud.documentai.v1.GcsPrefix.Builder builderForValue) { if (gcsPrefixBuilder_ == null) { source_ = builderForValue.build(); onChanged(); } else { gcsPrefixBuilder_.setMessage(builderForValue.build()); } sourceCase_ = 1; return this; } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1.GcsPrefix gcs_prefix = 1;</code> */ public Builder mergeGcsPrefix(com.google.cloud.documentai.v1.GcsPrefix value) { if (gcsPrefixBuilder_ == null) { if (sourceCase_ == 1 && source_ != com.google.cloud.documentai.v1.GcsPrefix.getDefaultInstance()) { source_ = com.google.cloud.documentai.v1.GcsPrefix.newBuilder( (com.google.cloud.documentai.v1.GcsPrefix) source_) .mergeFrom(value) .buildPartial(); } else { source_ = value; } onChanged(); } else { if (sourceCase_ == 1) { gcsPrefixBuilder_.mergeFrom(value); } else { gcsPrefixBuilder_.setMessage(value); } } sourceCase_ = 1; return this; } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1.GcsPrefix gcs_prefix = 1;</code> */ public Builder clearGcsPrefix() { if (gcsPrefixBuilder_ == null) { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; onChanged(); } } else { if (sourceCase_ == 1) { sourceCase_ = 0; source_ = null; } gcsPrefixBuilder_.clear(); } return this; } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1.GcsPrefix gcs_prefix = 1;</code> */ public com.google.cloud.documentai.v1.GcsPrefix.Builder getGcsPrefixBuilder() { return getGcsPrefixFieldBuilder().getBuilder(); } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. 
* </pre> * * <code>.google.cloud.documentai.v1.GcsPrefix gcs_prefix = 1;</code> */ @java.lang.Override public com.google.cloud.documentai.v1.GcsPrefixOrBuilder getGcsPrefixOrBuilder() { if ((sourceCase_ == 1) && (gcsPrefixBuilder_ != null)) { return gcsPrefixBuilder_.getMessageOrBuilder(); } else { if (sourceCase_ == 1) { return (com.google.cloud.documentai.v1.GcsPrefix) source_; } return com.google.cloud.documentai.v1.GcsPrefix.getDefaultInstance(); } } /** * * * <pre> * The set of documents that match the specified Cloud Storage `gcs_prefix`. * </pre> * * <code>.google.cloud.documentai.v1.GcsPrefix gcs_prefix = 1;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1.GcsPrefix, com.google.cloud.documentai.v1.GcsPrefix.Builder, com.google.cloud.documentai.v1.GcsPrefixOrBuilder> getGcsPrefixFieldBuilder() { if (gcsPrefixBuilder_ == null) { if (!(sourceCase_ == 1)) { source_ = com.google.cloud.documentai.v1.GcsPrefix.getDefaultInstance(); } gcsPrefixBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1.GcsPrefix, com.google.cloud.documentai.v1.GcsPrefix.Builder, com.google.cloud.documentai.v1.GcsPrefixOrBuilder>( (com.google.cloud.documentai.v1.GcsPrefix) source_, getParentForChildren(), isClean()); source_ = null; } sourceCase_ = 1; onChanged(); return gcsPrefixBuilder_; } private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1.GcsDocuments, com.google.cloud.documentai.v1.GcsDocuments.Builder, com.google.cloud.documentai.v1.GcsDocumentsOrBuilder> gcsDocumentsBuilder_; /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1.GcsDocuments gcs_documents = 2;</code> * * @return Whether the gcsDocuments field is set. */ @java.lang.Override public boolean hasGcsDocuments() { return sourceCase_ == 2; } /** * * * <pre> * The set of documents individually specified on Cloud Storage. 
* </pre> * * <code>.google.cloud.documentai.v1.GcsDocuments gcs_documents = 2;</code> * * @return The gcsDocuments. */ @java.lang.Override public com.google.cloud.documentai.v1.GcsDocuments getGcsDocuments() { if (gcsDocumentsBuilder_ == null) { if (sourceCase_ == 2) { return (com.google.cloud.documentai.v1.GcsDocuments) source_; } return com.google.cloud.documentai.v1.GcsDocuments.getDefaultInstance(); } else { if (sourceCase_ == 2) { return gcsDocumentsBuilder_.getMessage(); } return com.google.cloud.documentai.v1.GcsDocuments.getDefaultInstance(); } } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1.GcsDocuments gcs_documents = 2;</code> */ public Builder setGcsDocuments(com.google.cloud.documentai.v1.GcsDocuments value) { if (gcsDocumentsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } source_ = value; onChanged(); } else { gcsDocumentsBuilder_.setMessage(value); } sourceCase_ = 2; return this; } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1.GcsDocuments gcs_documents = 2;</code> */ public Builder setGcsDocuments( com.google.cloud.documentai.v1.GcsDocuments.Builder builderForValue) { if (gcsDocumentsBuilder_ == null) { source_ = builderForValue.build(); onChanged(); } else { gcsDocumentsBuilder_.setMessage(builderForValue.build()); } sourceCase_ = 2; return this; } /** * * * <pre> * The set of documents individually specified on Cloud Storage. 
* </pre> * * <code>.google.cloud.documentai.v1.GcsDocuments gcs_documents = 2;</code> */ public Builder mergeGcsDocuments(com.google.cloud.documentai.v1.GcsDocuments value) { if (gcsDocumentsBuilder_ == null) { if (sourceCase_ == 2 && source_ != com.google.cloud.documentai.v1.GcsDocuments.getDefaultInstance()) { source_ = com.google.cloud.documentai.v1.GcsDocuments.newBuilder( (com.google.cloud.documentai.v1.GcsDocuments) source_) .mergeFrom(value) .buildPartial(); } else { source_ = value; } onChanged(); } else { if (sourceCase_ == 2) { gcsDocumentsBuilder_.mergeFrom(value); } else { gcsDocumentsBuilder_.setMessage(value); } } sourceCase_ = 2; return this; } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1.GcsDocuments gcs_documents = 2;</code> */ public Builder clearGcsDocuments() { if (gcsDocumentsBuilder_ == null) { if (sourceCase_ == 2) { sourceCase_ = 0; source_ = null; onChanged(); } } else { if (sourceCase_ == 2) { sourceCase_ = 0; source_ = null; } gcsDocumentsBuilder_.clear(); } return this; } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1.GcsDocuments gcs_documents = 2;</code> */ public com.google.cloud.documentai.v1.GcsDocuments.Builder getGcsDocumentsBuilder() { return getGcsDocumentsFieldBuilder().getBuilder(); } /** * * * <pre> * The set of documents individually specified on Cloud Storage. 
* </pre> * * <code>.google.cloud.documentai.v1.GcsDocuments gcs_documents = 2;</code> */ @java.lang.Override public com.google.cloud.documentai.v1.GcsDocumentsOrBuilder getGcsDocumentsOrBuilder() { if ((sourceCase_ == 2) && (gcsDocumentsBuilder_ != null)) { return gcsDocumentsBuilder_.getMessageOrBuilder(); } else { if (sourceCase_ == 2) { return (com.google.cloud.documentai.v1.GcsDocuments) source_; } return com.google.cloud.documentai.v1.GcsDocuments.getDefaultInstance(); } } /** * * * <pre> * The set of documents individually specified on Cloud Storage. * </pre> * * <code>.google.cloud.documentai.v1.GcsDocuments gcs_documents = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1.GcsDocuments, com.google.cloud.documentai.v1.GcsDocuments.Builder, com.google.cloud.documentai.v1.GcsDocumentsOrBuilder> getGcsDocumentsFieldBuilder() { if (gcsDocumentsBuilder_ == null) { if (!(sourceCase_ == 2)) { source_ = com.google.cloud.documentai.v1.GcsDocuments.getDefaultInstance(); } gcsDocumentsBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.documentai.v1.GcsDocuments, com.google.cloud.documentai.v1.GcsDocuments.Builder, com.google.cloud.documentai.v1.GcsDocumentsOrBuilder>( (com.google.cloud.documentai.v1.GcsDocuments) source_, getParentForChildren(), isClean()); source_ = null; } sourceCase_ = 2; onChanged(); return gcsDocumentsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.documentai.v1.BatchDocumentsInputConfig) } // @@protoc_insertion_point(class_scope:google.cloud.documentai.v1.BatchDocumentsInputConfig) private static final 
com.google.cloud.documentai.v1.BatchDocumentsInputConfig DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.documentai.v1.BatchDocumentsInputConfig(); } public static com.google.cloud.documentai.v1.BatchDocumentsInputConfig getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BatchDocumentsInputConfig> PARSER = new com.google.protobuf.AbstractParser<BatchDocumentsInputConfig>() { @java.lang.Override public BatchDocumentsInputConfig parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BatchDocumentsInputConfig> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BatchDocumentsInputConfig> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.documentai.v1.BatchDocumentsInputConfig getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/sdk-platform-java
35,910
java-showcase/proto-gapic-showcase-v1beta1/src/main/java/com/google/showcase/v1beta1/SearchBlurbsResponse.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: schema/google/showcase/v1beta1/messaging.proto // Protobuf Java Version: 3.25.8 package com.google.showcase.v1beta1; /** * * * <pre> * The operation response message for the * google.showcase.v1beta1.Messaging&#92;SearchBlurbs method. * </pre> * * Protobuf type {@code google.showcase.v1beta1.SearchBlurbsResponse} */ public final class SearchBlurbsResponse extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.showcase.v1beta1.SearchBlurbsResponse) SearchBlurbsResponseOrBuilder { private static final long serialVersionUID = 0L; // Use SearchBlurbsResponse.newBuilder() to construct. 
private SearchBlurbsResponse(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SearchBlurbsResponse() { blurbs_ = java.util.Collections.emptyList(); nextPageToken_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SearchBlurbsResponse(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_SearchBlurbsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_SearchBlurbsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.showcase.v1beta1.SearchBlurbsResponse.class, com.google.showcase.v1beta1.SearchBlurbsResponse.Builder.class); } public static final int BLURBS_FIELD_NUMBER = 1; @SuppressWarnings("serial") private java.util.List<com.google.showcase.v1beta1.Blurb> blurbs_; /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ @java.lang.Override public java.util.List<com.google.showcase.v1beta1.Blurb> getBlurbsList() { return blurbs_; } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ @java.lang.Override public java.util.List<? extends com.google.showcase.v1beta1.BlurbOrBuilder> getBlurbsOrBuilderList() { return blurbs_; } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ @java.lang.Override public int getBlurbsCount() { return blurbs_.size(); } /** * * * <pre> * Blurbs that matched the search query. 
* </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ @java.lang.Override public com.google.showcase.v1beta1.Blurb getBlurbs(int index) { return blurbs_.get(index); } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ @java.lang.Override public com.google.showcase.v1beta1.BlurbOrBuilder getBlurbsOrBuilder(int index) { return blurbs_.get(index); } public static final int NEXT_PAGE_TOKEN_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * Pass this value in SearchBlurbsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Blurb&#92;SearchBlurbs` method to * retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. */ @java.lang.Override public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in SearchBlurbsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Blurb&#92;SearchBlurbs` method to * retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { for (int i = 0; i < blurbs_.size(); i++) { output.writeMessage(1, blurbs_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, nextPageToken_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; for (int i = 0; i < blurbs_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, blurbs_.get(i)); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(nextPageToken_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, nextPageToken_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.showcase.v1beta1.SearchBlurbsResponse)) { return super.equals(obj); } com.google.showcase.v1beta1.SearchBlurbsResponse other = (com.google.showcase.v1beta1.SearchBlurbsResponse) obj; if (!getBlurbsList().equals(other.getBlurbsList())) return false; if (!getNextPageToken().equals(other.getNextPageToken())) return false; if 
(!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (getBlurbsCount() > 0) { hash = (37 * hash) + BLURBS_FIELD_NUMBER; hash = (53 * hash) + getBlurbsList().hashCode(); } hash = (37 * hash) + NEXT_PAGE_TOKEN_FIELD_NUMBER; hash = (53 * hash) + getNextPageToken().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.showcase.v1beta1.SearchBlurbsResponse parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.showcase.v1beta1.SearchBlurbsResponse parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.showcase.v1beta1.SearchBlurbsResponse parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.showcase.v1beta1.SearchBlurbsResponse parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.showcase.v1beta1.SearchBlurbsResponse parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.showcase.v1beta1.SearchBlurbsResponse parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.showcase.v1beta1.SearchBlurbsResponse parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.showcase.v1beta1.SearchBlurbsResponse parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.showcase.v1beta1.SearchBlurbsResponse parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.showcase.v1beta1.SearchBlurbsResponse parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.showcase.v1beta1.SearchBlurbsResponse parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.showcase.v1beta1.SearchBlurbsResponse parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.showcase.v1beta1.SearchBlurbsResponse prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * The operation response message for the * google.showcase.v1beta1.Messaging&#92;SearchBlurbs method. * </pre> * * Protobuf type {@code google.showcase.v1beta1.SearchBlurbsResponse} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.showcase.v1beta1.SearchBlurbsResponse) com.google.showcase.v1beta1.SearchBlurbsResponseOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_SearchBlurbsResponse_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.showcase.v1beta1.MessagingOuterClass .internal_static_google_showcase_v1beta1_SearchBlurbsResponse_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.showcase.v1beta1.SearchBlurbsResponse.class, com.google.showcase.v1beta1.SearchBlurbsResponse.Builder.class); } // Construct using com.google.showcase.v1beta1.SearchBlurbsResponse.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (blurbsBuilder_ == null) { blurbs_ = java.util.Collections.emptyList(); } else { blurbs_ = null; blurbsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000001); nextPageToken_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.showcase.v1beta1.MessagingOuterClass 
.internal_static_google_showcase_v1beta1_SearchBlurbsResponse_descriptor; } @java.lang.Override public com.google.showcase.v1beta1.SearchBlurbsResponse getDefaultInstanceForType() { return com.google.showcase.v1beta1.SearchBlurbsResponse.getDefaultInstance(); } @java.lang.Override public com.google.showcase.v1beta1.SearchBlurbsResponse build() { com.google.showcase.v1beta1.SearchBlurbsResponse result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.showcase.v1beta1.SearchBlurbsResponse buildPartial() { com.google.showcase.v1beta1.SearchBlurbsResponse result = new com.google.showcase.v1beta1.SearchBlurbsResponse(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.showcase.v1beta1.SearchBlurbsResponse result) { if (blurbsBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0)) { blurbs_ = java.util.Collections.unmodifiableList(blurbs_); bitField0_ = (bitField0_ & ~0x00000001); } result.blurbs_ = blurbs_; } else { result.blurbs_ = blurbsBuilder_.build(); } } private void buildPartial0(com.google.showcase.v1beta1.SearchBlurbsResponse result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.nextPageToken_ = nextPageToken_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( 
com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.showcase.v1beta1.SearchBlurbsResponse) { return mergeFrom((com.google.showcase.v1beta1.SearchBlurbsResponse) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.showcase.v1beta1.SearchBlurbsResponse other) { if (other == com.google.showcase.v1beta1.SearchBlurbsResponse.getDefaultInstance()) return this; if (blurbsBuilder_ == null) { if (!other.blurbs_.isEmpty()) { if (blurbs_.isEmpty()) { blurbs_ = other.blurbs_; bitField0_ = (bitField0_ & ~0x00000001); } else { ensureBlurbsIsMutable(); blurbs_.addAll(other.blurbs_); } onChanged(); } } else { if (!other.blurbs_.isEmpty()) { if (blurbsBuilder_.isEmpty()) { blurbsBuilder_.dispose(); blurbsBuilder_ = null; blurbs_ = other.blurbs_; bitField0_ = (bitField0_ & ~0x00000001); blurbsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getBlurbsFieldBuilder() : null; } else { blurbsBuilder_.addAllMessages(other.blurbs_); } } } if (!other.getNextPageToken().isEmpty()) { nextPageToken_ = other.nextPageToken_; bitField0_ |= 0x00000002; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { com.google.showcase.v1beta1.Blurb m = input.readMessage( com.google.showcase.v1beta1.Blurb.parser(), extensionRegistry); if (blurbsBuilder_ == null) { ensureBlurbsIsMutable(); blurbs_.add(m); } else { blurbsBuilder_.addMessage(m); } break; } // case 10 case 18: { nextPageToken_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.util.List<com.google.showcase.v1beta1.Blurb> blurbs_ = java.util.Collections.emptyList(); private void ensureBlurbsIsMutable() { if (!((bitField0_ & 0x00000001) != 0)) { blurbs_ = new java.util.ArrayList<com.google.showcase.v1beta1.Blurb>(blurbs_); bitField0_ |= 0x00000001; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.showcase.v1beta1.Blurb, com.google.showcase.v1beta1.Blurb.Builder, com.google.showcase.v1beta1.BlurbOrBuilder> blurbsBuilder_; /** * * * <pre> * Blurbs that matched the search query. 
* </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public java.util.List<com.google.showcase.v1beta1.Blurb> getBlurbsList() { if (blurbsBuilder_ == null) { return java.util.Collections.unmodifiableList(blurbs_); } else { return blurbsBuilder_.getMessageList(); } } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public int getBlurbsCount() { if (blurbsBuilder_ == null) { return blurbs_.size(); } else { return blurbsBuilder_.getCount(); } } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public com.google.showcase.v1beta1.Blurb getBlurbs(int index) { if (blurbsBuilder_ == null) { return blurbs_.get(index); } else { return blurbsBuilder_.getMessage(index); } } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public Builder setBlurbs(int index, com.google.showcase.v1beta1.Blurb value) { if (blurbsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureBlurbsIsMutable(); blurbs_.set(index, value); onChanged(); } else { blurbsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public Builder setBlurbs(int index, com.google.showcase.v1beta1.Blurb.Builder builderForValue) { if (blurbsBuilder_ == null) { ensureBlurbsIsMutable(); blurbs_.set(index, builderForValue.build()); onChanged(); } else { blurbsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Blurbs that matched the search query. 
* </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public Builder addBlurbs(com.google.showcase.v1beta1.Blurb value) { if (blurbsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureBlurbsIsMutable(); blurbs_.add(value); onChanged(); } else { blurbsBuilder_.addMessage(value); } return this; } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public Builder addBlurbs(int index, com.google.showcase.v1beta1.Blurb value) { if (blurbsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureBlurbsIsMutable(); blurbs_.add(index, value); onChanged(); } else { blurbsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public Builder addBlurbs(com.google.showcase.v1beta1.Blurb.Builder builderForValue) { if (blurbsBuilder_ == null) { ensureBlurbsIsMutable(); blurbs_.add(builderForValue.build()); onChanged(); } else { blurbsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public Builder addBlurbs(int index, com.google.showcase.v1beta1.Blurb.Builder builderForValue) { if (blurbsBuilder_ == null) { ensureBlurbsIsMutable(); blurbs_.add(index, builderForValue.build()); onChanged(); } else { blurbsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public Builder addAllBlurbs( java.lang.Iterable<? 
extends com.google.showcase.v1beta1.Blurb> values) { if (blurbsBuilder_ == null) { ensureBlurbsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, blurbs_); onChanged(); } else { blurbsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public Builder clearBlurbs() { if (blurbsBuilder_ == null) { blurbs_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); } else { blurbsBuilder_.clear(); } return this; } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public Builder removeBlurbs(int index) { if (blurbsBuilder_ == null) { ensureBlurbsIsMutable(); blurbs_.remove(index); onChanged(); } else { blurbsBuilder_.remove(index); } return this; } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public com.google.showcase.v1beta1.Blurb.Builder getBlurbsBuilder(int index) { return getBlurbsFieldBuilder().getBuilder(index); } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public com.google.showcase.v1beta1.BlurbOrBuilder getBlurbsOrBuilder(int index) { if (blurbsBuilder_ == null) { return blurbs_.get(index); } else { return blurbsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public java.util.List<? extends com.google.showcase.v1beta1.BlurbOrBuilder> getBlurbsOrBuilderList() { if (blurbsBuilder_ != null) { return blurbsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(blurbs_); } } /** * * * <pre> * Blurbs that matched the search query. 
* </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public com.google.showcase.v1beta1.Blurb.Builder addBlurbsBuilder() { return getBlurbsFieldBuilder() .addBuilder(com.google.showcase.v1beta1.Blurb.getDefaultInstance()); } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public com.google.showcase.v1beta1.Blurb.Builder addBlurbsBuilder(int index) { return getBlurbsFieldBuilder() .addBuilder(index, com.google.showcase.v1beta1.Blurb.getDefaultInstance()); } /** * * * <pre> * Blurbs that matched the search query. * </pre> * * <code>repeated .google.showcase.v1beta1.Blurb blurbs = 1;</code> */ public java.util.List<com.google.showcase.v1beta1.Blurb.Builder> getBlurbsBuilderList() { return getBlurbsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.showcase.v1beta1.Blurb, com.google.showcase.v1beta1.Blurb.Builder, com.google.showcase.v1beta1.BlurbOrBuilder> getBlurbsFieldBuilder() { if (blurbsBuilder_ == null) { blurbsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.showcase.v1beta1.Blurb, com.google.showcase.v1beta1.Blurb.Builder, com.google.showcase.v1beta1.BlurbOrBuilder>( blurbs_, ((bitField0_ & 0x00000001) != 0), getParentForChildren(), isClean()); blurbs_ = null; } return blurbsBuilder_; } private java.lang.Object nextPageToken_ = ""; /** * * * <pre> * A token to retrieve next page of results. * Pass this value in SearchBlurbsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Blurb&#92;SearchBlurbs` method to * retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The nextPageToken. 
*/ public java.lang.String getNextPageToken() { java.lang.Object ref = nextPageToken_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); nextPageToken_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in SearchBlurbsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Blurb&#92;SearchBlurbs` method to * retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return The bytes for nextPageToken. */ public com.google.protobuf.ByteString getNextPageTokenBytes() { java.lang.Object ref = nextPageToken_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); nextPageToken_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in SearchBlurbsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Blurb&#92;SearchBlurbs` method to * retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageToken(java.lang.String value) { if (value == null) { throw new NullPointerException(); } nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in SearchBlurbsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Blurb&#92;SearchBlurbs` method to * retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearNextPageToken() { nextPageToken_ = getDefaultInstance().getNextPageToken(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * A token to retrieve next page of results. * Pass this value in SearchBlurbsRequest.page_token field in the subsequent * call to `google.showcase.v1beta1.Blurb&#92;SearchBlurbs` method to * retrieve the next page of results. * </pre> * * <code>string next_page_token = 2;</code> * * @param value The bytes for nextPageToken to set. * @return This builder for chaining. */ public Builder setNextPageTokenBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); nextPageToken_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.showcase.v1beta1.SearchBlurbsResponse) } // @@protoc_insertion_point(class_scope:google.showcase.v1beta1.SearchBlurbsResponse) private static final com.google.showcase.v1beta1.SearchBlurbsResponse DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.showcase.v1beta1.SearchBlurbsResponse(); } public static com.google.showcase.v1beta1.SearchBlurbsResponse getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SearchBlurbsResponse> PARSER = new com.google.protobuf.AbstractParser<SearchBlurbsResponse>() { @java.lang.Override public SearchBlurbsResponse parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = 
newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SearchBlurbsResponse> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SearchBlurbsResponse> getParserForType() { return PARSER; } @java.lang.Override public com.google.showcase.v1beta1.SearchBlurbsResponse getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
openjdk/jdk8
36,132
corba/src/share/classes/com/sun/corba/se/impl/util/Utility.java
/* * Copyright (c) 1999, 2004, Oracle and/or its affiliates. All rights reserved. * DO NOT ALTER OR REMOVE COPYRIGHT NOTICES OR THIS FILE HEADER. * * This code is free software; you can redistribute it and/or modify it * under the terms of the GNU General Public License version 2 only, as * published by the Free Software Foundation. Oracle designates this * particular file as subject to the "Classpath" exception as provided * by Oracle in the LICENSE file that accompanied this code. * * This code is distributed in the hope that it will be useful, but WITHOUT * ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or * FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License * version 2 for more details (a copy is included in the LICENSE file that * accompanied this code). * * You should have received a copy of the GNU General Public License version * 2 along with this work; if not, write to the Free Software Foundation, * Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA. * * Please contact Oracle, 500 Oracle Parkway, Redwood Shores, CA 94065 USA * or visit www.oracle.com if you need additional information or have any * questions. */ /* * Licensed Materials - Property of IBM * RMI-IIOP v1.0 * Copyright IBM Corp. 
1998 1999 All Rights Reserved * */ package com.sun.corba.se.impl.util; import org.omg.CORBA.SystemException; import org.omg.CORBA.CompletionStatus; import org.omg.CORBA.BAD_OPERATION; import org.omg.CORBA.BAD_INV_ORDER; import org.omg.CORBA.BAD_PARAM; import org.omg.CORBA.ORB; import org.omg.CORBA.Any; import org.omg.CORBA.TypeCode; import org.omg.CORBA.Principal; import org.omg.CORBA.portable.InputStream; import org.omg.CORBA.portable.OutputStream; import org.omg.CORBA.portable.BoxedValueHelper; import org.omg.CORBA.portable.ValueFactory; import org.omg.CORBA.portable.Streamable; import org.omg.CORBA.portable.Delegate; import java.util.Hashtable; import java.util.NoSuchElementException; import java.rmi.Remote; import java.rmi.NoSuchObjectException; import java.rmi.RemoteException; import java.rmi.server.RemoteStub; import javax.rmi.PortableRemoteObject; import javax.rmi.CORBA.Stub; import javax.rmi.CORBA.Tie; import javax.rmi.CORBA.Util; import java.io.Serializable; import java.io.File; import java.io.FileInputStream; import org.omg.PortableServer.POA; import com.sun.org.omg.SendingContext.CodeBase; import com.sun.corba.se.spi.logging.CORBALogDomains ; import com.sun.corba.se.spi.presentation.rmi.PresentationManager; import com.sun.corba.se.spi.presentation.rmi.StubAdapter ; import com.sun.corba.se.impl.logging.UtilSystemException ; import com.sun.corba.se.impl.logging.OMGSystemException ; /** * Handy class full of static functions. 
*/ public final class Utility { public static final String STUB_PREFIX = "_"; public static final String RMI_STUB_SUFFIX = "_Stub"; public static final String DYNAMIC_STUB_SUFFIX = "_DynamicStub" ; public static final String IDL_STUB_SUFFIX = "Stub"; public static final String TIE_SUFIX = "_Tie"; private static IdentityHashtable tieCache = new IdentityHashtable(); private static IdentityHashtable tieToStubCache = new IdentityHashtable(); private static IdentityHashtable stubToTieCache = new IdentityHashtable(); private static Object CACHE_MISS = new Object(); private static UtilSystemException wrapper = UtilSystemException.get( CORBALogDomains.UTIL ) ; private static OMGSystemException omgWrapper = OMGSystemException.get( CORBALogDomains.UTIL ) ; /** * Ensure that stubs, ties, and implementation objects * are 'connected' to the runtime. Converts implementation * objects to a type suitable for sending on the wire. * @param obj the object to connect. * @param orb the ORB to connect to if obj is exported to IIOP. * @param convertToStub true if implementation types should be * converted to Stubs rather than just org.omg.CORBA.Object. * @return the connected object. * @exception NoSuchObjectException if obj is an implementation * which has not been exported. */ public static Object autoConnect(Object obj, ORB orb, boolean convertToStub) { if (obj == null) { return obj; } if (StubAdapter.isStub(obj)) { try { StubAdapter.getDelegate(obj) ; } catch (BAD_OPERATION okay) { try { StubAdapter.connect( obj, orb ) ; } catch (RemoteException e) { // The stub could not be connected because it // has an invalid IOR... 
throw wrapper.objectNotConnected( e, obj.getClass().getName() ) ; } } return obj; } if (obj instanceof Remote) { Remote remoteObj = (Remote)obj; Tie theTie = Util.getTie(remoteObj); if (theTie != null) { try { theTie.orb(); } catch (SystemException okay) { theTie.orb(orb); } if (convertToStub) { Object result = loadStub(theTie,null,null,true); if (result != null) { return result; } else { throw wrapper.couldNotLoadStub(obj.getClass().getName()); } } else { return StubAdapter.activateTie( theTie ); } } else { // This is an implementation object which has not been // exported to IIOP OR is a JRMP stub or implementation // object which cannot be marshalled into an ORB stream... throw wrapper.objectNotExported( obj.getClass().getName() ) ; } } // Didn't need to do anything, just return the input... return obj; } /* * Get a new instance of an RMI-IIOP Tie for the * given server object. */ public static Tie loadTie(Remote obj) { Tie result = null; Class objClass = obj.getClass(); // Have we tried to find this guy before? synchronized (tieCache) { Object it = tieCache.get(obj); if (it == null) { // No, so try it... try { // First try the classname... result = loadTie(objClass); // If we don't have a valid tie at this point, // walk up the parent chain until we either // load a tie or encounter PortableRemoteObject // or java.lang.Object... while (result == null && (objClass = objClass.getSuperclass()) != null && objClass != PortableRemoteObject.class && objClass != Object.class) { result = loadTie(objClass); } } catch (Exception ex) { wrapper.loadTieFailed( ex, objClass.getName() ) ; } // Did we get it? if (result == null) { // Nope, so cache that fact... tieCache.put(obj,CACHE_MISS); } else { // Yes, so cache it... tieCache.put(obj,result); } } else { // Yes, return a new instance or fail again if // it was a miss last time... 
if (it != CACHE_MISS) { try { result = (Tie) it.getClass().newInstance(); } catch (Exception e) { } } } } return result; } /* * Load an RMI-IIOP Tie */ private static Tie loadTie(Class theClass) { return com.sun.corba.se.spi.orb.ORB.getStubFactoryFactory(). getTie( theClass ) ; } /* * Clear the stub/tie caches. Intended for use by * test code. */ public static void clearCaches() { synchronized (tieToStubCache) { tieToStubCache.clear(); } synchronized (tieCache) { tieCache.clear(); } synchronized (stubToTieCache) { stubToTieCache.clear(); } } /* * Load a class and check that it is assignable to a given type. * @param className the class name. * @param remoteCodebase the codebase to use. May be null. * @param loader the class loader of last resort. May be null. * @param expectedType the expected type. May be null. * @return the loaded class. */ static Class loadClassOfType(String className, String remoteCodebase, ClassLoader loader, Class expectedType, ClassLoader expectedTypeClassLoader) throws ClassNotFoundException { Class loadedClass = null; try { //Sequence finding of the stubs according to spec try{ //If-else is put here for speed up of J2EE. //According to the OMG spec, the if clause is not dead code. //It can occur if some compiler has allowed generation //into org.omg.stub hierarchy for non-offending //classes. This will encourage people to //produce non-offending class stubs in their own hierarchy. 
if (!PackagePrefixChecker.hasOffendingPrefix( PackagePrefixChecker.withoutPackagePrefix(className))){ loadedClass = Util.loadClass( PackagePrefixChecker.withoutPackagePrefix(className), remoteCodebase, loader); } else { loadedClass = Util.loadClass(className, remoteCodebase, loader); } } catch (ClassNotFoundException cnfe) { loadedClass = Util.loadClass(className, remoteCodebase, loader); } if (expectedType == null) return loadedClass; } catch (ClassNotFoundException cnfe) { if (expectedType == null) throw cnfe; } // If no class was loaded, or if the loaded class is not of the // correct type, make a further attempt to load the correct class // using the classloader of the expected type. // _REVISIT_ Is this step necessary, or should the Util,loadClass // algorithm always produce a valid class if the setup is correct? // Does the OMG standard algorithm need to be changed to include // this step? if (loadedClass == null || !expectedType.isAssignableFrom(loadedClass)){ if (expectedType.getClassLoader() != expectedTypeClassLoader) throw new IllegalArgumentException( "expectedTypeClassLoader not class loader of " + "expected Type."); if (expectedTypeClassLoader != null) loadedClass = expectedTypeClassLoader.loadClass(className); else { ClassLoader cl = Thread.currentThread().getContextClassLoader(); if (cl == null) cl = ClassLoader.getSystemClassLoader(); loadedClass = cl.loadClass(className); } } return loadedClass; } /* * Load a class and check that it is compatible with a given type. * @param className the class name. * @param remoteCodebase the codebase to use. May be null. * @param loadingContext the loading context. May be null. * @param relatedType the related type. May be null. * @return the loaded class. 
*/ public static Class loadClassForClass (String className, String remoteCodebase, ClassLoader loader, Class relatedType, ClassLoader relatedTypeClassLoader) throws ClassNotFoundException { if (relatedType == null) return Util.loadClass(className, remoteCodebase, loader); Class loadedClass = null; try { loadedClass = Util.loadClass(className, remoteCodebase, loader); } catch (ClassNotFoundException cnfe) { if (relatedType.getClassLoader() == null) throw cnfe; } // If no class was not loaded, or if the loaded class is not of the // correct type, make a further attempt to load the correct class // using the classloader of the related type. // _REVISIT_ Is this step necessary, or should the Util,loadClass // algorithm always produce a valid class if the setup is correct? // Does the OMG standard algorithm need to be changed to include // this step? if (loadedClass == null || (loadedClass.getClassLoader() != null && loadedClass.getClassLoader().loadClass(relatedType.getName()) != relatedType)) { if (relatedType.getClassLoader() != relatedTypeClassLoader) throw new IllegalArgumentException( "relatedTypeClassLoader not class loader of relatedType."); if (relatedTypeClassLoader != null) loadedClass = relatedTypeClassLoader.loadClass(className); } return loadedClass; } /** * Get the helper for an IDLValue * * Throws MARSHAL exception if no helper found. */ public static BoxedValueHelper getHelper(Class clazz, String codebase, String repId) { String className = null; if (clazz != null) { className = clazz.getName(); if (codebase == null) codebase = Util.getCodebase(clazz); } else { if (repId != null) className = RepositoryId.cache.getId(repId).getClassName(); if (className == null) // no repId or unrecognized repId throw wrapper.unableLocateValueHelper( CompletionStatus.COMPLETED_MAYBE); } try { ClassLoader clazzLoader = (clazz == null ? 
null : clazz.getClassLoader()); Class helperClass = loadClassForClass(className+"Helper", codebase, clazzLoader, clazz, clazzLoader); return (BoxedValueHelper)helperClass.newInstance(); } catch (ClassNotFoundException cnfe) { throw wrapper.unableLocateValueHelper( CompletionStatus.COMPLETED_MAYBE, cnfe ); } catch (IllegalAccessException iae) { throw wrapper.unableLocateValueHelper( CompletionStatus.COMPLETED_MAYBE, iae ); } catch (InstantiationException ie) { throw wrapper.unableLocateValueHelper( CompletionStatus.COMPLETED_MAYBE, ie ); } catch (ClassCastException cce) { throw wrapper.unableLocateValueHelper( CompletionStatus.COMPLETED_MAYBE, cce ); } } /** * Get the factory for an IDLValue * * Throws MARSHAL exception if no factory found. */ public static ValueFactory getFactory(Class clazz, String codebase, ORB orb, String repId) { ValueFactory factory = null; if ((orb != null) && (repId != null)) { try { factory = ((org.omg.CORBA_2_3.ORB)orb).lookup_value_factory( repId); } catch (org.omg.CORBA.BAD_PARAM ex) { // Try other way } } String className = null; if (clazz != null) { className = clazz.getName(); if (codebase == null) codebase = Util.getCodebase(clazz); } else { if (repId != null) className = RepositoryId.cache.getId(repId).getClassName(); if (className == null) // no repId or unrecognized repId throw omgWrapper.unableLocateValueFactory( CompletionStatus.COMPLETED_MAYBE); } // if earlier search found a non-default factory, or the same default // factory that loadClassForClass would return, bale out now... if (factory != null && (!factory.getClass().getName().equals(className+"DefaultFactory") || (clazz == null && codebase == null))) return factory; try { ClassLoader clazzLoader = (clazz == null ? 
null : clazz.getClassLoader()); Class factoryClass = loadClassForClass(className+"DefaultFactory", codebase, clazzLoader, clazz, clazzLoader); return (ValueFactory)factoryClass.newInstance(); } catch (ClassNotFoundException cnfe) { throw omgWrapper.unableLocateValueFactory( CompletionStatus.COMPLETED_MAYBE, cnfe); } catch (IllegalAccessException iae) { throw omgWrapper.unableLocateValueFactory( CompletionStatus.COMPLETED_MAYBE, iae); } catch (InstantiationException ie) { throw omgWrapper.unableLocateValueFactory( CompletionStatus.COMPLETED_MAYBE, ie); } catch (ClassCastException cce) { throw omgWrapper.unableLocateValueFactory( CompletionStatus.COMPLETED_MAYBE, cce); } } /* * Load an RMI-IIOP Stub given a Tie. * @param tie the tie. * @param stubClass the stub class. May be null. * @param remoteCodebase the codebase to use. May be null. * @param onlyMostDerived if true, will fail if cannot load a stub for the * first repID in the tie. If false, will walk all repIDs. * @return the stub or null if not found. */ public static Remote loadStub(Tie tie, PresentationManager.StubFactory stubFactory, String remoteCodebase, boolean onlyMostDerived) { StubEntry entry = null; // Do we already have it cached? synchronized (tieToStubCache) { Object cached = tieToStubCache.get(tie); if (cached == null) { // No, so go try to load it... entry = loadStubAndUpdateCache( tie, stubFactory, remoteCodebase, onlyMostDerived); } else { // Yes, is it a stub? If not, it was a miss last // time, so return null again... if (cached != CACHE_MISS) { // It's a stub. entry = (StubEntry) cached; // Does the cached stub meet the requirements // of the caller? If the caller does not require // the most derived stub and does not require // a specific stub type, we don't have to check // any further because the cached type is good // enough... if (!entry.mostDerived && onlyMostDerived) { // We must reload because we do not have // the most derived cached already... 
// The stubFactory arg must be null here // to force onlyMostDerived=true to work // correctly. entry = loadStubAndUpdateCache(tie,null, remoteCodebase,true); } else if (stubFactory != null && !StubAdapter.getTypeIds(entry.stub)[0].equals( stubFactory.getTypeIds()[0]) ) { // We do not have exactly the right stub. First, try to // upgrade the cached stub by forcing it to the most // derived stub... entry = loadStubAndUpdateCache(tie,null, remoteCodebase,true); // If that failed, try again with the exact type // we need... if (entry == null) { entry = loadStubAndUpdateCache(tie,stubFactory, remoteCodebase,onlyMostDerived); } } else { // Use the cached stub. Is the delegate set? try { Delegate stubDel = StubAdapter.getDelegate( entry.stub ) ; } catch (Exception e2) { // No, so set it if we can... try { Delegate del = StubAdapter.getDelegate( tie ) ; StubAdapter.setDelegate( entry.stub, del ) ; } catch (Exception e) {} } } } } } if (entry != null) { return (Remote)entry.stub; } else { return null; } } /* * Load an RMI-IIOP Stub given a Tie, but do not look in the cache. * This method must be called with the lock held for tieToStubCache. * @param tie the tie. * @param stubFactory the stub factory. May be null. * @param remoteCodebase the codebase to use. May be null. * @param onlyMostDerived if true, will fail if cannot load a stub for the * first repID in the tie. If false, will walk all repIDs. * @return the StubEntry or null if not found. 
*/ private static StubEntry loadStubAndUpdateCache ( Tie tie, PresentationManager.StubFactory stubFactory, String remoteCodebase, boolean onlyMostDerived) { org.omg.CORBA.Object stub = null; StubEntry entry = null; boolean tieIsStub = StubAdapter.isStub( tie ) ; if (stubFactory != null) { try { stub = stubFactory.makeStub(); } catch (Throwable e) { wrapper.stubFactoryCouldNotMakeStub( e ) ; if (e instanceof ThreadDeath) { throw (ThreadDeath) e; } } } else { String[] ids = null; if (tieIsStub) { ids = StubAdapter.getTypeIds( tie ) ; } else { // This will throw an exception if the tie // is not a Servant. XXX Handle this better? ids = ((org.omg.PortableServer.Servant)tie). _all_interfaces( null, null ); } if (remoteCodebase == null) { remoteCodebase = Util.getCodebase(tie.getClass()); } if (ids.length == 0) { stub = new org.omg.stub.java.rmi._Remote_Stub(); } else { // Now walk all the RepIDs till we find a stub or fail... for (int i = 0; i < ids.length; i++) { if (ids[i].length() == 0) { stub = new org.omg.stub.java.rmi._Remote_Stub(); break; } try { PresentationManager.StubFactoryFactory stubFactoryFactory = com.sun.corba.se.spi.orb.ORB.getStubFactoryFactory(); RepositoryId rid = RepositoryId.cache.getId( ids[i] ) ; String className = rid.getClassName() ; boolean isIDLInterface = rid.isIDLType() ; stubFactory = stubFactoryFactory.createStubFactory( className, isIDLInterface, remoteCodebase, null, tie.getClass().getClassLoader() ) ; stub = stubFactory.makeStub(); break; } catch (Exception e) { wrapper.errorInMakeStubFromRepositoryId( e ) ; } if (onlyMostDerived) break; } } } if (stub == null) { // Stub == null, so cache the miss... 
tieToStubCache.put(tie,CACHE_MISS); } else { if (tieIsStub) { try { Delegate del = StubAdapter.getDelegate( tie ) ; StubAdapter.setDelegate( stub, del ) ; } catch( Exception e1 ) { // The tie does not have a delegate set, so stash // this tie away using the stub as a key so that // later, when the stub is connected, we can find // and connect the tie as well... synchronized (stubToTieCache) { stubToTieCache.put(stub,tie); } } } else { // Tie extends Servant try { Delegate delegate = StubAdapter.getDelegate( tie ) ; StubAdapter.setDelegate( stub, delegate ) ; } catch( org.omg.CORBA.BAD_INV_ORDER bad) { synchronized (stubToTieCache) { stubToTieCache.put(stub,tie); } } catch( Exception e ) { // Exception is caught because of any of the // following reasons // 1) POA is not associated with the TIE // 2) POA Policies for the tie-associated POA // does not support _this_object() call. throw wrapper.noPoa( e ) ; } } // Update the cache... entry = new StubEntry(stub,onlyMostDerived); tieToStubCache.put(tie,entry); } return entry; } /* * If we loadStub(Tie,...) stashed away a tie which was * not connected, remove it from the cache and return * it. */ public static Tie getAndForgetTie (org.omg.CORBA.Object stub) { synchronized (stubToTieCache) { return (Tie) stubToTieCache.remove(stub); } } /* * Remove any cached Stub for the given tie. */ public static void purgeStubForTie (Tie tie) { StubEntry entry; synchronized (tieToStubCache) { entry = (StubEntry)tieToStubCache.remove(tie); } if (entry != null) { synchronized (stubToTieCache) { stubToTieCache.remove(entry.stub); } } } /* * Remove cached tie/servant pair. */ public static void purgeTieAndServant (Tie tie) { synchronized (tieCache) { Object target = tie.getTarget(); if (target != null) tieCache.remove(target); } } /* * Convert a RepId to a stubName... */ public static String stubNameFromRepID (String repID) { // Convert the typeid to a RepositoryId instance, get // the className and mangle it as needed... 
RepositoryId id = RepositoryId.cache.getId(repID); String className = id.getClassName(); if (id.isIDLType()) { className = idlStubName(className); } else { className = stubName(className); } return className; } /* * Load an RMI-IIOP Stub. This is used in PortableRemoteObject.narrow. */ public static Remote loadStub (org.omg.CORBA.Object narrowFrom, Class narrowTo) { Remote result = null; try { // Get the codebase from the delegate to use when loading // the new stub, if possible... String codebase = null; try { // We can't assume that narrowFrom is a CORBA_2_3 stub, yet // it may have a 2_3 Delegate that provides a codebase. Swallow // the ClassCastException otherwise. Delegate delegate = StubAdapter.getDelegate( narrowFrom ) ; codebase = ((org.omg.CORBA_2_3.portable.Delegate)delegate). get_codebase(narrowFrom); } catch (ClassCastException e) { wrapper.classCastExceptionInLoadStub( e ) ; } PresentationManager.StubFactoryFactory sff = com.sun.corba.se.spi.orb.ORB.getStubFactoryFactory() ; PresentationManager.StubFactory sf = sff.createStubFactory( narrowTo.getName(), false, codebase, narrowTo, narrowTo.getClassLoader() ) ; result = (Remote)sf.makeStub() ; StubAdapter.setDelegate( result, StubAdapter.getDelegate( narrowFrom ) ) ; } catch (Exception err) { wrapper.exceptionInLoadStub( err ) ; } return result; } /* * Load an RMI-IIOP Stub class. This is used in the * StaticStubFactoryFactory code. */ public static Class loadStubClass(String repID, String remoteCodebase, Class expectedType) throws ClassNotFoundException { // Get the repID and check for "" special case. // We should never be called with it (See CDRInputStream // and the loadStub() method)... if (repID.length() == 0) { throw new ClassNotFoundException(); } // Get the stubname from the repID and load // the class. If we have a valid 'sender', fall // back to using its codebase if we need to... String className = Utility.stubNameFromRepID(repID); ClassLoader expectedTypeClassLoader = (expectedType == null ? 
null : expectedType.getClassLoader()); try { return loadClassOfType(className, remoteCodebase, expectedTypeClassLoader, expectedType, expectedTypeClassLoader); } catch (ClassNotFoundException e) { return loadClassOfType(PackagePrefixChecker.packagePrefix() + className, remoteCodebase, expectedTypeClassLoader, expectedType, expectedTypeClassLoader); } } /** * Create an RMI stub name. */ public static String stubName (String className) { return stubName( className, false ) ; } public static String dynamicStubName( String className ) { return stubName( className, true ) ; } private static String stubName( String className, boolean isDynamic ) { String name = stubNameForCompiler( className, isDynamic ) ; if (PackagePrefixChecker.hasOffendingPrefix( name )) name = PackagePrefixChecker.packagePrefix() + name ; return name ; } public static String stubNameForCompiler (String className) { return stubNameForCompiler( className, false ) ; } private static String stubNameForCompiler( String className, boolean isDynamic ) { int index = className.indexOf('$'); if (index < 0) { index = className.lastIndexOf('.'); } String suffix = isDynamic ? DYNAMIC_STUB_SUFFIX : RMI_STUB_SUFFIX ; if (index > 0) { return className.substring(0,index+1) + STUB_PREFIX + className.substring(index+1) + suffix; } else { return STUB_PREFIX + className + suffix; } } /** * Create an RMI tie name. */ public static String tieName (String className) { return PackagePrefixChecker.hasOffendingPrefix(tieNameForCompiler(className)) ? 
PackagePrefixChecker.packagePrefix() + tieNameForCompiler(className) : tieNameForCompiler(className); } public static String tieNameForCompiler (String className) { int index = className.indexOf('$'); if (index < 0) { index = className.lastIndexOf('.'); } if (index > 0) { return className.substring(0,index+1) + STUB_PREFIX + className.substring(index+1) + TIE_SUFIX; } else { return STUB_PREFIX + className + TIE_SUFIX; } } /** * Throws the CORBA equivalent of a java.io.NotSerializableException */ public static void throwNotSerializableForCorba(String className) { throw omgWrapper.notSerializable( CompletionStatus.COMPLETED_MAYBE, className ) ; } /** * Create an IDL stub name. */ public static String idlStubName(String className) { String result = null; int index = className.lastIndexOf('.'); if (index > 0) { result = className.substring(0,index+1) + STUB_PREFIX + className.substring(index+1) + IDL_STUB_SUFFIX; } else { result = STUB_PREFIX + className + IDL_STUB_SUFFIX; } return result; } public static void printStackTrace() { Throwable thr = new Throwable( "Printing stack trace:" ) ; thr.fillInStackTrace() ; thr.printStackTrace() ; } /** * Read an object reference from the input stream and narrow * it to the desired type. * @param in the stream to read from. * @throws ClassCastException if narrowFrom cannot be cast to narrowTo. */ public static Object readObjectAndNarrow(InputStream in, Class narrowTo) throws ClassCastException { Object result = in.read_Object(); if (result != null) return PortableRemoteObject.narrow(result, narrowTo); else return null; } /** * Read an abstract interface type from the input stream and narrow * it to the desired type. * @param in the stream to read from. * @throws ClassCastException if narrowFrom cannot be cast to narrowTo. 
*/ public static Object readAbstractAndNarrow( org.omg.CORBA_2_3.portable.InputStream in, Class narrowTo) throws ClassCastException { Object result = in.read_abstract_interface(); if (result != null) return PortableRemoteObject.narrow(result, narrowTo); else return null; } /** Converts an Ascii Character into Hexadecimal digit */ static int hexOf( char x ) { int val; val = x - '0'; if (val >=0 && val <= 9) return val; val = (x - 'a') + 10; if (val >= 10 && val <= 15) return val; val = (x - 'A') + 10; if (val >= 10 && val <= 15) return val; throw wrapper.badHexDigit() ; } } class StubEntry { org.omg.CORBA.Object stub; boolean mostDerived; StubEntry(org.omg.CORBA.Object stub, boolean mostDerived) { this.stub = stub; this.mostDerived = mostDerived; } }
google/closure-stylesheets
34,916
tests/com/google/common/css/compiler/ast/GssParserTest.java
/* * Copyright 2009 Google Inc. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.common.css.compiler.ast; import static com.google.common.truth.Truth.assertThat; import static com.google.common.truth.Truth.assertWithMessage; import com.google.common.collect.ImmutableList; import com.google.common.css.SourceCode; import com.google.common.css.SourceCodeLocation; import com.google.common.css.compiler.passes.CompactPrinter; import com.google.common.css.compiler.passes.testing.AstPrinter; import java.util.List; import org.junit.Assert; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** * Unit tests for the {@link GssParser}. 
* * @author fbenz@google.com (Florian Benz) */ @RunWith(JUnit4.class) public class GssParserTest { private CssTree testValid(String gss) throws GssParserException { CssTree tree = parse(gss); assertThat(tree).isNotNull(); return tree; } private void testTree(String gss, String output) throws GssParserException { CssTree tree = parse(gss); assertThat(tree).isNotNull(); CssRootNode root = tree.getRoot(); assertThat(root).isNotNull(); assertThat(AstPrinter.print(tree)).isEqualTo(output); } @Test public void testManySources() throws Exception { CssTree tree = parse(ImmutableList.of( new SourceCode("test1", "a {}"), new SourceCode("test2", "@component c { x {y: z} }"), new SourceCode("test3", "b {}"))); CssRootNode root = tree.getRoot(); assertThat(root).isNotNull(); assertThat(AstPrinter.print(tree)).isEqualTo("[[a]{[]}@component [c]{[x]{[y:[[z]];]}}[b]{[]}]"); } @Test public void testAst1() throws Exception { testTree("a {}", "[[a]{[]}]"); } @Test public void testAst2() throws Exception { testTree("a.b c#d > e.f + g {}", "[[a.b c#d>e.f+g]{[]}]"); } @Test public void testAst3() throws Exception { testTree("a {x: y}", "[[a]{[x:[[y]];]}]"); } @Test public void testAst4() throws Exception { testTree("a {w: x; y: z}", "[[a]{[w:[[x]];y:[[z]];]}]"); } @Test public void testAst5() throws Exception { testTree("a {b: 1em}", "[[a]{[b:[[1em]];]}]"); } @Test public void testAst6() throws Exception { testTree("a {b: 1.5em}", "[[a]{[b:[[1.5em]];]}]"); } @Test public void testAst7() throws Exception { testTree("a {b: 'x'}", "[[a]{[b:[['x']];]}]"); } @Test public void testAst8() throws Exception { testTree("a {b: url(#x)}", "[[a]{[b:[url(#x)];]}]"); } @Test public void testAst9() throws Exception { testTree("a {b: url('#x')}", "[[a]{[b:[url('#x')];]}]"); } @Test public void testAst10() throws Exception { testTree("a {b: x y z}", "[[a]{[b:[[x][y][z]];]}]"); } @Test public void testAst11() throws Exception { testTree("a {b: c,d,e/f g,h i j,k}", 
"[[a]{[b:[[[c],[d],[[e]/[f]]][[g],[h]][i][[j],[k]]];]}]"); } @Test public void testAst12() throws Exception { testTree("a {b: rgb(0,0,0)}", "[[a]{[b:[rgb(0,0,0)];]}]"); } @Test public void testAst13() throws Exception { testTree("a {b: custom(0,0)}", "[[a]{[b:[custom(0,0)];]}]"); } @Test public void testAst14() throws Exception { testTree("@def a b;", "[@def [a] [b];]"); } @Test public void testAst15() throws Exception { testTree("@component a { x {y: z} }", "[@component [a]{[x]{[y:[[z]];]}}]"); } @Test public void testAst16() throws Exception { testTree("a:foo {\n bla : d ; }", "[[a:foo]{[bla:[[d]];]}]"); } @Test public void testAst17() throws Exception { testTree("foo {f: rgb(o=0);}", "[[foo]{[f:[rgb([[o]=[0]])];]}]"); } @Test public void testAst18() throws Exception { testTree("a:lang(c) { d: e }", "[[a:lang(c)]{[d:[[e]];]}]"); } @Test public void testAst19() throws Exception { testTree("a~b { d: e }", "[[a~b]{[d:[[e]];]}]"); } @Test public void testAst20() throws Exception { testTree("a:b(-2n+3) { d: e }", "[[a:b(-2n+3)]{[d:[[e]];]}]"); } @Test public void testAst21() throws Exception { testTree("a:not(#id) { d: e }", "[[a:not(#id)]{[d:[[e]];]}]"); } @Test public void testAst22() throws Exception { testTree(".a { d:e,f }", "[[.a]{[d:[[[e],[f]]];]}]"); } @Test public void testAst23() throws Exception { testTree(".a { d:e f,g h }", "[[.a]{[d:[[e][[f],[g]][h]];]}]"); } @Test public void testAst24() throws Exception { testTree("a~b/deep/c { d: e }", "[[a~b/deep/c]{[d:[[e]];]}]"); } @Test public void testParsingRules1() throws Exception { testValid("css_rule33 {\n" + "border: black ; /* comment */\n" + "height : 1em\n" + " }" ); } // We don't test for comments between '!' and 'important'. See the comment on // the IMPORTANT_SYM in the grammar for the reason. @Test public void testParsingRules2() throws Exception { testValid("ul.navbar {\n" + " position: absolute;\n" + " top: top;\n" + " left: down;\n" + " width: nice }\n" + "\n" + ".foo {\n" + " position: absolute ! 
important ;\n" + "}\n" + ".bar {\n" + " position: absolute ! important;\n\n\n" + "}" ); } @Test public void testParsingRules3() throws Exception { testValid("css_rule33 test2 {\n" + "border: black ; /* comment */\n" + "height : 1em\n" + " }" ); } @Test public void testParsingRules4() throws Exception { testValid("p:before {content: counter(par-num, upper-roman) \". \"}"); } @Test public void testParsingSelector1() throws Exception { testValid("a b { x: y}"); } @Test public void testParsingSelector2() throws Exception { testValid("a > b { x: y}"); } @Test public void testParsingSelector3() throws Exception { testValid("a + b { x: y}"); } @Test public void testParsingSelector4() throws Exception { testValid("a + b > c d e.f + g { x: y}"); } @Test public void testParsingSelector5() throws Exception { testValid("a + b > c d e.f#d + g {}"); } @Test public void testParsingSelector6() throws Exception { testValid("a ~ b { x: y}"); } @Test public void testParsingSelector7() throws Exception { testValid("a /deep/ b { x: y}"); } @Test public void testParsingExpr1() throws Exception { testValid("aab {x:s r t}"); } @Test public void testParsingExpr2() throws Exception { testValid("aab {x:s 1em t}"); } @Test public void testParsingExpr3() throws Exception { testValid("aab {x:-1px +1px -1px 1.7px}"); } @Test public void testParsingURL() throws Exception { testValid("a { x: url('http://test.com') }"); } @Test public void testParsingHexcolor() throws Exception { testValid("a { x: #fff }"); } @Test public void testParsingFunction1Arg() throws Exception { testValid("a { x: f(1) }"); } @Test public void testParsingFunctionManyArgs() throws Exception { testValid("a { x: f(1, 2, 3) }"); } @Test public void testParsingFilterFunctions() throws Exception { testValid("a { filter: drop-shadow(1 2 3) custom(1 2 3);" + "filter: drop-shadow(1, 2, 3) custom(1, 2, 3);}"); } @Test public void testParsingWebkitFilterFunctions() throws Exception { testValid("a { filter: -webkit-drop-shadow(1 2) 
-webkit-custom(1 2);" + "filter: -webkit-drop-shadow(1, 2) -webkit-custom(1, 2);}"); } @Test public void testParsingLocalFunctions() throws Exception { testValid("@font-face { src: local(Gentium), url(Gentium.woff);" + "src: local(Gentium Bold), local(Gentium-Bold), url(GentiumBold.woff);}"); } @Test public void testParsingAt1() throws Exception { testValid("@import url('http://test.com/test.css');"); } @Test public void testParsingAt2() throws Exception { testValid("@import url(http://test.com/test.css);"); } @Test public void testParsingAt3() throws Exception { testValid("@component a extends b {\n" + "@def z 1;\n" + "x {y: z}\n" + "}"); } @Test public void testParsingDef1() throws Exception { testValid("@def RC_TOP_LEFT tl;\n" + "@def RC_TOP_RIGHT tr;\n" + "@def BASE_WARNING_LINK_COLOR #c3d9ff; /* light blue */" ); } @Test public void testParsingDef3() throws Exception { testValid("@def A_B /* @default */ inherit;"); } @Test public void testParsingAttribute1() throws Exception { testValid("a[href=\"http://www.w3.org/\"]{\n" + "bla:d\n" + "}"); } @Test public void testParsingAttribute2() throws Exception { testValid("*[lang|=\"en\"] { color : red }"); } @Test public void testParsingPseudo1() throws Exception { testValid("a:foo {\n bla : d ; }"); } @Test public void testParsingPseudo2() throws Exception { testValid("a:lang(en) {\n bla : d ; }"); } @Test public void testParsingIf1() throws Exception { testValid("@if (RTL_LANG) {\n" + " @def RTL_FLAG 1; \n" + " @def LEFT right;\n" + "} @else {\n" + " @def IMGURL url('images/image.gif');\n" + "}"); } @Test public void testParsingIf2() throws Exception { testValid("@if BROWSER_IE6 {\n" + " @def FUNBOX_MARGIN 0;\n" + "} @elseif BROWSER_IE {\n" + " @def FUNBOX_MARGIN 1 0 -1px 0;\n" + "} @elseif BROWSER_FF3_OR_HIGHER {\n" + " @def FUNBOX_MARGIN -2px 0 0 0;\n" + "} @else {\n" + " @if(A) { @def BB 23; }\n" + " @def FUNBOX_MARGIN -2px 0 -1px 0;\n" + "}"); } @Test public void testParsingIf3() throws Exception { 
testValid("@if (RTL_LANG) {\n" + " CSS_RULE2.CLASS#id{ d:34em; }\n" + "} @else {\n" + "}"); } @Test public void testParsingParenthesizedTerm() throws Exception { testValid("@if (FOO) { x { y: z } }"); } @Test public void testParsingBooleanTerm1() throws Exception { testValid("@if ( A && (!B || C )) { @def RTL_FLAG 1;}"); } @Test public void testParsingBooleanTerm2() throws Exception { testValid("@if (!A && !B || C || !(F && G ) ) { @def RTL_FLAG 1;}"); } @Test public void testParsingComplexDef1() throws Exception { testValid("@def A a, b, c;"); } @Test public void testParsingComplexDef2() throws Exception { testValid("@def FONT a, b, c 14px/2em #fff;"); } @Test public void testParsingEqualsOperator() throws Exception { testValid(".CSS_ {\n" + " filter: alpha(opacity = 85) ;\n" + "}"); } @Test public void testParsingColonFunctionName() throws Exception { testValid("x {y: a.b:c(d)}"); } @Test public void testParsingColonFunctionName2() throws Exception { testValid(".CSS_ {\n" + "-ms-filter: \"progid:DXImageTr.Microsoft.Alpha(Opacity=80)\" ;\n" + "filter: progid:DXImageTr.Microsoft.AlphaImageLoader" + "(src='images/muc_bubble_left.png', sizingMethod='scale' );\n" + "}"); } @Test public void testParsingEmptyPseudo() throws Exception { testValid("::a, :a[b]::c { x: y}"); } @Test public void testParsingArbitraryDim() throws Exception { testValid("a {x: 2emelet 3x 5t}"); } @Test public void testSelectorWithSpace() throws Exception { testValid("a /* x */ , b {x: y}"); } @Test public void testIeRect() throws Exception { // Non-standard IE workaround. 
testValid(".a { clip: rect(0 0 0 0);}"); } @Test public void testEllipse() throws Exception { testValid(".a { clip-path: ellipse(150px 300px at 50% 50%);}"); } @Test public void testInset() throws Exception { testValid(".a { clip-path: inset(100px 100px 100px 100px);}"); } @Test public void testCircle() throws Exception { testValid(".a { clip-path: circle(50% at right 5px bottom 10px);}"); } @Test public void testPolygon() throws Exception { testValid(".a { clip-path: polygon(0 0, 0 300px, 300px 600px);}"); } @Test public void testEqualAttribute() throws Exception { testValid("h1[foo=\"bar\"] {x : y}"); } @Test public void testCaretEqualAttribute() throws Exception { testValid("h1[foo^=\"bar\"] {x : y}"); } @Test public void testDollarEqualAttribute() throws Exception { testValid("h1[foo$=\"bar\"] {x : y}"); } @Test public void testAsteriskEqualAttribute() throws Exception { testValid("h1[foo*=\"bar\"] {x : y}"); } @Test public void testPipeEqualAttribute() throws Exception { testValid("h1[foo|=\"bar\"] {x : y}"); } @Test public void testImageSet() throws Exception { testValid("div:before {" + "content: -webkit-image-set(url(a.png) 1x, url(b.png) 2x);" + "content: -moz-image-set(url(a.png) 1x, url(b.png) 2x);" + "content: -o-image-set(url(a.png) 1x, url(b.png) 2x);" + "content: image-set(url(a.png) 1x, url(b.png) 2x);" + "}"); } @Test public void testWebkitGradient() throws Exception { CssTree tree = testValid(".CSS { background: " + "-webkit-gradient(linear, 0 0, 0 100%, from(#fff), to(#ddd)) }"); CssRootNode root = tree.getRoot(); assertThat(root).isNotNull(); assertThat(AstPrinter.print(tree)) .isEqualTo( "[[.CSS]{[background:[" + "-webkit-gradient(linear,0 0,0 100%,from(#fff),to(#ddd))];]}]"); CssRulesetNode ruleset = (CssRulesetNode) tree.getRoot().getBody().getChildAt(0); CssDeclarationNode decl = (CssDeclarationNode) ruleset.getDeclarations().getChildAt(0); CssFunctionNode function = (CssFunctionNode) decl.getPropertyValue().getChildAt(0); 
CssFunctionArgumentsNode args = function.getArguments(); assertWithMessage( "The argument list should be flattened, and contain " + "7 arguments + 6 separators (4 commas and 2 meaningful spaces).") .that(args.numChildren()) .isEqualTo(13); } @Test public void testGradients() throws Exception { testValid("div {" + "a:radial-gradient(-88px, -500px, #6A6A7A, #333, #000);" + "b:radial-gradient(30% 30%, closest-corner, white, black);" + "c:radial-gradient(center, 5em 40px, white, black);" + "d:linear-gradient(bottom left, red 20px, yellow, green," + "blue 90%);" + "e:repeating-linear-gradient(left, red 10%, blue 30%);" + "f:repeating-radial-gradient(top left, circle, red, blue 10%," + "red 20%);" + "}"); } /* http://www.webkit.org/blog/1424/css3-gradients/ */ @Test public void testWebkitGradients() throws Exception { testValid("div {" + "a:-webkit-radial-gradient(-88px, -500px, #6A6A7A, #333, #000);" + "b:-webkit-radial-gradient(30% 30%, closest-corner, white, black);" + "c:-webkit-radial-gradient(center, 5em 40px, white, black);" + "d:-webkit-linear-gradient(bottom left, red 20px, yellow, green," + "blue 90%);" + "e:-webkit-repeating-linear-gradient(left, red 10%, blue 30%);" + "f:-webkit-repeating-radial-gradient(top left, circle, red, blue 10%," + "red 20%);" + "}"); } @Test public void testMozillaGradients() throws Exception { testValid("div {" + "a:-moz-radial-gradient(-88px, -500px, #6A6A7A, #333, #000);" + "b:-moz-radial-gradient(30% 30%, closest-corner, white, black);" + "c:-moz-radial-gradient(center, 5em 40px, white, black);" + "d:-moz-linear-gradient(bottom left, red 20px, yellow, green," + "blue 90%);" + "e:-moz-repeating-linear-gradient(left, red 10%, blue 30%);" + "f:-moz-repeating-radial-gradient(top left, circle, red, blue 10%," + "red 20%);" + "}"); } @Test public void testOperaGradients() throws Exception { testValid("div {" + "a:-o-radial-gradient(-88px, -500px, #6A6A7A, #333, #000);" + "b:-o-radial-gradient(30% 30%, closest-corner, white, black);" + 
"c:-o-radial-gradient(center, 5em 40px, white, black);" + "d:-o-linear-gradient(bottom left, red 20px, yellow, green," + "blue 90%);" + "e:-o-repeating-linear-gradient(left, red 10%, blue 30%);" + "f:-o-repeating-radial-gradient(top left, circle, red, blue 10%," + "red 20%);" + "}"); } @Test public void testInternetExplorerGradients() throws Exception { testValid("div {" + "a:-ms-radial-gradient(-88px, -500px, #6A6A7A, #333, #000);" + "b:-ms-radial-gradient(30% 30%, closest-corner, white, black);" + "c:-ms-radial-gradient(center, 5em 40px, white, black);" + "d:-ms-linear-gradient(bottom left, red 20px, yellow, green," + "blue 90%);" + "e:-ms-repeating-linear-gradient(left, red 10%, blue 30%);" + "f:-ms-repeating-radial-gradient(top left, circle, red, blue 10%," + "red 20%);" + "}"); } @Test public void testKonquererGradients() throws Exception { // Taken from http://twitter.github.com/bootstrap/1.4.0/bootstrap.css testValid("div {" + "background-image: -khtml-gradient(linear, left top, left bottom, " + " from(#333333), to(#222222));" + "}"); } @Test public void testWebkitMinDevicePixelRatio() throws Exception { testValid("@media screen and (-webkit-min-device-pixel-ratio:0) {}"); } @Test public void testMediaQuery() throws Exception { testValid("@media screen and (max-height: 300px) and (min-width: 20px) {}"); } @Test public void testMediaQueryRatioNoSpaces() throws Exception { testValid("@media screen and (aspect-ratio: 3/4) {}"); } @Test public void testMediaQueryRatioWithSpaces() throws Exception { testValid("@media screen and (aspect-ratio: 3 / 4) {}"); } @Test public void testMediaQueryRatioWithManyLeadingSpaces() throws Exception { testValid("@media screen and (aspect-ratio: 3 / 4) {}"); } @Test public void testMediaQueryRatioWithTrailingSpaces() throws Exception { testValid("@media screen and (aspect-ratio: 3/ 4) {}"); } @Test public void testMediaQueryRatioWithNoTrailingSpaces() throws Exception { testValid("@media screen and (aspect-ratio: 3 /4) {}"); } 
@Test public void testMozLinearGradient() throws Exception { testValid(".CSS { background-image: " + "-moz-linear-gradient(bottom, #c0c0c0 0%, #dddddd 90%) }"); } @Test public void testParsingWebkitKeyframes1() throws Exception { testValid("@-webkit-keyframes bounce {\n" + " from {\n" + " left: 0px;\n" + " }\n" + " to {\n" + " left: 200px;\n" + " }\n" + "}\n"); } @Test public void testParsingMozKeyframes1() throws Exception { testValid("@-moz-keyframes bounce {\n" + " from {\n" + " left: 0px;\n" + " }\n" + " to {\n" + " left: 200px;\n" + " }\n" + "}\n"); } @Test public void testParsingWebkitKeyframes2() throws Exception { testValid("@-webkit-keyframes pulse {\n" + " 0% {\n" + " background-color: red;\n" + " opacity: 1.0;\n" + " -webkit-transform: scale(1.0) rotate(0deg);\n" + " }\n" + " 33.33% {\n" + " background-color: blue;\n" + " opacity: 0.75;\n" + " -webkit-transform: scale(1.1) rotate(-5deg);\n" + " }\n" + " 66.66% {\n" + " background-color: green;\n" + " opacity: 0.5;\n" + " -webkit-transform: scale(1.1) rotate(5deg);\n" + " }\n" + " 100% {\n" + " background-color: red;\n" + " opacity: 1.0;\n" + " -webkit-transform: scale(1.0) rotate(0deg);\n" + " }\n" + "}"); } @Test public void testParsingWebkitKeyframes3() throws Exception { testValid("@-webkit-keyframes bounce {\n" + " 0%, 51.2% {\n" + " left: 0px;\n" + " background: red;\n" + " }\n" + " 25%, 90.5% {\n" + " left: 200px;\n" + " background: green;\n" + " }\n" + " 25% {\n" + " background: blue;\n" + " }\n" + "}"); } @Test public void testParsingWebkitKeyframes4() throws Exception { testValid("@-webkit-keyframes from {}"); testValid("@-webkit-keyframes to {}"); testValid("from {}"); testValid("to {}"); } @Test public void testEscapingInDoubleQuoteString() throws Exception { testValid("body {content: \"\\0af9bcHH\"}"); testValid("body {content: \"\\0HH\"}"); testValid("body {content: \"\\aHH\"}"); testValid("body {content: \"\\gHH\"}"); testValid("body {content: \"\\\"'HH\"}"); } @Test public void 
testEscapingInSingleQuoteString() throws Exception { testValid("body {content: '\\0af9bcHH'}"); testValid("body {content: '\\0HH'}"); testValid("body {content: '\\aHH'}"); testValid("body {content: '\\gHH'}"); testValid("body {content: '\"\\'HH'}"); } @Test public void testPseudoFunction() throws Exception { testValid("div :lang(en) { color: #FFF; }"); testValid(":lang(fr) { color: #FFF; }"); } @Test public void testPseudoNth() throws Exception { testValid("div :nth-child(1n+1) { color: #FFF; }"); testValid("div :nth-child(n+1) { color: #FFF; }"); testValid("div :nth-child(+n+2) { color: #FFF; }"); testValid("div :nth-child(n-1) { color: #FFF; }"); testValid("div :nth-child(-n-1) { color: #FFF; }"); testValid("div :nth-child(+2n+3) { color: #FFF; }"); testValid("div :nth-child(-5n+1) { color: #FFF; }"); // just 'n' is not supported by WebKit yet testValid("div :nth-child(n) { color: #FFF; }"); testValid("div :nth-child(-n) { color: #FFF; }"); testValid("div :nth-child(+n) { color: #FFF; }"); testValid("div :nth-child(n-0) { color: #FFF; }"); testValid("div :nth-child(0n+0) { color: #FFF; }"); testValid("div :nth-child(1) { color: #FFF; }"); testValid("div :nth-child(+7) { color: #FFF; }"); testValid("div :nth-child(-9) { color: #FFF; }"); testValid("div :nth-child(odd) { color: #FFF; }"); testValid("div :nth-child(even) { color: #FFF; }"); } @Test public void testPseudoNot() throws Exception { testValid("p :not(.classy) { color: #123; }"); testValid("p :not(div) { color: #123; }"); testValid("p:not(div) { color: #123; }"); testValid("p :not( div ) { color: #123; }"); testValid("p :not(#id) { color: #123; }"); testValid("*:not(:link):not(:visited) {}"); } @Test public void testPseudoElements() throws Exception { testValid("p::first-line { text-transform: uppercase }"); testValid("p::first-letter { color: green; font-size: 200% }"); testValid("div::after { color: #123; }"); testValid("div::before { color: #123; }"); } @Test public void testOldPseudoElements() throws 
Exception { testValid("p:first-line { text-transform: uppercase }"); testValid("p:first-letter { color: green; font-size: 200% }"); testValid("div:after { color: #123; }"); testValid("div:before { color: #123; }"); } @Test public void testMixinDefinitions() throws Exception { testValid("@defmixin name(PAR1, PAR2) { prop1: PAR1; prop2: PAR2 }"); testValid("@defmixin name( PAR1 , PAR2 )" + "{ prop1: PAR1; prop2: PAR2 }"); testValid("@defmixin name(PAR1, PAR2) { prop1: PAR1; prop2: CONST; }"); } @Test public void testMixins() throws Exception { testValid("div { @mixin name(); }"); testValid("div { @mixin name( ) ; }"); testValid("div { prop1: val; @mixin defname(2px, #fff, 23%); }"); testValid("div { prop1: val; @mixin defname(); p:v;}"); testValid("div { @mixin foo(1px/1em); }"); testValid("div { @mixin foo(1px 1px); }"); } @Test public void testUnquotedUrl() throws Exception { testValid("div { background-image: url(http://google.com/logo.png) }"); } @Test public void testFunctionApplicationUrl() throws Exception { testValid("div { background-image: url(dataUrl('s')) }"); } @Test public void testUrlOfFunctionOfId() throws Exception { // Bare URLs in function arguments are deprecated, but // we have some dependent code to cleanup before removing // the feature. 
testValid("div { background-image: url(dataUrl(x)); }"); } @Test public void testFn() throws Exception { testValid("div { background-image: url(http://foo) }"); } @Test public void testUrlPrefix() throws Exception { testTree("div { background-image: url-prefix(http://fo); }", "[[div]{[background-image:[url-prefix(http://fo)];]}]"); } @Test public void testUrlPrefix2() throws Exception { testTree("div { background-image: url-prefix(fn(0)); }", "[[div]{[background-image:[url-prefix(fn(0))];]}]"); } @Test public void testEmptyUrl() throws Exception { testValid("div { background-image: url() }"); } @Test public void testUrlWithWhitespace() throws Exception { testTree("div { background-image: url( 'http://google.com/logo.png'); }", "[[div]{[background-image:" + "[url('http://google.com/logo.png')];]}]"); } @Test public void testUnquotedUrlWithWhitespace() throws Exception { testTree("div { background-image: url( http://google.com/logo.png); }", "[[div]{[background-image:" + "[url(http://google.com/logo.png)];]}]"); } @Test public void testCdoCdc() throws Exception { testTree( "<!--\ndiv { color: red; }\n-->", "[[div]{[color:[[red]];]}]"); } @Test public void testIntraPropertyCdoCdc() throws Exception { String css = ".foo{border:1px<!--solid-->blue;}"; try { parse(css); Assert.fail("CDO should not be accepted in property values."); } catch (GssParserException e) { assertWithMessage( "The error should reflect that CDO is not accepted in property " + "values.") .that(e.getGssError().getLocation().getBeginCharacterIndex()) .isEqualTo(css.indexOf("<!--")); } } @Test public void testMicrosoftListAtRule() throws Exception { // This is syntactically valid according to CSS3, so we should // be able to ignore the proprietary @list rule and not fail // the whole parse. 
String[] samples = new String[] { "@list l0\n" + "{mso-list-id:792754432;}\n" + "div { border: solid thin black }", "@list l0:level1\n" + "{mso-list-id:792754432;}\n" + "div { border: solid thin black }"}; for (String css : samples) { // no exceptions the first time CssTree t1 = parse(css); String output1 = CompactPrinter.printCompactly(t1); // also no exceptions the second time CssTree t2 = parse(output1); // and the we've reached a fixed point assertThat(AstPrinter.print(t2)).isEqualTo(AstPrinter.print(t1)); } } @Test public void testRunawayMicrosoftListAtRule() throws Exception { String[] samples = new String[] { // unterminated block "@list l0 {mso-list-id:792754432;", // unterminated nested paren "@list l0 {mso-list-id:792754432;(}", // improper nesting with parens "@list l0 {mso-list-id:792754432;(})", // unterminated block, unmatched open bracket "@list l0 {mso-list-id:792754432;[", // unterminated block, close bracket without matching open bracket "@list l0 {mso-list-id:792754432;]"}; for (String css : samples) { try { parse(css); Assert.fail("The compiler should only accept complete @list rules, not " + css); } catch (GssParserException e) { // expected } } } @Test public void testCustomBorderProperty() throws Exception { testTree( "a { border-height: 1em; }", "[[a]{[border-height:[[1em]];]}]"); testTree( "a { border-left-height: 1em; }", "[[a]{[border-left-height:[[1em]];]}]"); testTree( "a { border-right-height: 1em; }", "[[a]{[border-right-height:[[1em]];]}]"); testTree( "a { border-top-height: 1em; }", "[[a]{[border-top-height:[[1em]];]}]"); testTree( "a { border-bottom-height: 1em; }", "[[a]{[border-bottom-height:[[1em]];]}]"); } @Test public void testForLoop() throws Exception { testTree( "@for $i from 1 to 6 {}", "[@for [$i] [from] [1] [to] [6]{}]"); } @Test public void testForLoopWithStep() throws Exception { testTree( "@for $i from 1 to 6 step 2 {}", "[@for [$i] [from] [1] [to] [6] [step] [2]{}]"); } @Test public void testForLoopWithVariables() 
throws Exception { testTree( "@for $i from $x to $y step $z {}", "[@for [$i] [from] [$x] [to] [$y] [step] [$z]{}]"); } @Test public void testForLoopWithVariablesInBlock() throws Exception { testTree( "@for $i from 1 to 2 { .foo-$i { padding: $i } }", "[@for [$i] [from] [1] [to] [2]{[.foo-$i]{[padding:[[$i]];]}}]"); } @Test public void testComments() throws GssParserException { testTree("div {}/*comment*/", "[[div]{[]}]"); testTree("div {}/*comment*/p {}", "[[div]{[]}[p]{[]}]"); testTree("div {}/***comment**/p {}", "[[div]{[]}[p]{[]}]"); testTree("div {}/***c/o**m//m***e////nt**/p {}", "[[div]{[]}[p]{[]}]"); testTree("div {}/***c/o**m//m/***e////nt/***/p {}", "[[div]{[]}[p]{[]}]"); testTree("div {}/****************/p {}", "[[div]{[]}[p]{[]}]"); testTree("div {}/**/p {}", "[[div]{[]}[p]{[]}]"); testTree("div {}/**/p {}/**/", "[[div]{[]}[p]{[]}]"); testTree("div {}/**/p {}/**/div {}", "[[div]{[]}[p]{[]}[div]{[]}]"); } @Test public void testUnicodeRange() throws Exception { testValid("@font-face { unicode-range: U+26;}"); testValid("@font-face { unicode-range: U+0015-00FF;}"); testValid("@font-face { unicode-range: U+A015-C0FF;}"); testValid("@font-face { unicode-range: U+26??;}"); } @Test public void testCalc_simple_noUnits() throws Exception { testValid(".elem { width: calc(5*2) }"); testTree(".elem { width: calc(5*2) }", "[[.elem]{[width:[calc([[5]*[2]])];]}]"); } @Test public void testCalc_simple() throws Exception { testValid(".elem { width: calc(5px*2) }"); testTree(".elem { width: calc(5px*2) }", "[[.elem]{[width:[calc([[5px]*[2]])];]}]"); } @Test public void testCalc_simpleConstant() throws Exception { testValid("@def A 5px; .elem { width: calc(A*2) }"); testTree( "@def A 5px; .elem { width: calc(A*2) }", "[@def [A] [5px];[.elem]{[width:[calc([[A]*[2]])];]}]"); } @Test public void testCalc_complexConstant() throws Exception { testValid("@def A 5px+2; .elem { width: calc(A*2) }"); testTree( "@def A 5px; .elem { width: calc(A*2) }", "[@def [A] 
[5px];[.elem]{[width:[calc([[A]*[2]])];]}]"); } @Test public void testCalc_complexConstant_unaryOperator() throws Exception { testValid("@def A -5px; .elem { width: calc(A/2) }"); testTree( "@def A -5px; .elem { width: calc(A/2) }", "[@def [A] [-5px];[.elem]{[width:[calc([[A]/[2]])];]}]"); } @Test public void testCalc_withParenthesizedSums() throws Exception { testValid("p { width: calc(4 * (5px * 2)); }"); testTree( "p { width: calc(4 * (5px * 2)); }", "[[p]{[width:[calc([[4]*[([5px]*[2])]])];]}]"); } @Test public void testCalc_fourOperands() throws Exception { testValid("p { width: calc(4 + 5 + 6 + 7);}"); testTree( "p { width: calc(4 + 5 + 6 + 7);}", "[[p]{[width:[calc([[4] + [[5] + [[6] + [7]]]])];]}]"); } @Test public void testCalc_nestedConstant() throws Exception { testValid("@def A 5px; p { width: calc((A + 4) - (A * A)); }"); testTree( "@def A 5px; p { width: calc((A + 4) - (A * A)); }", "[@def [A] [5px];[p]{[width:[calc([[([A] + [4])] - [([A]*[A])]])];]}]"); } @Test public void testNumericNodeLocation() throws GssParserException { CssTree tree = new GssParser(new SourceCode(null, "div{width:99px;}")).parse(); final CssNumericNode[] resultHolder = new CssNumericNode[1]; tree.getVisitController() .startVisit( new DefaultTreeVisitor() { @Override public boolean enterValueNode(CssValueNode value) { if (value instanceof CssNumericNode) { assertThat(resultHolder[0]).isNull(); resultHolder[0] = (CssNumericNode) value; } return true; } }); assertThat(resultHolder[0]).isNotNull(); SourceCodeLocation location = resultHolder[0].getSourceCodeLocation(); assertThat(location.getEndCharacterIndex() - location.getBeginCharacterIndex()) .isEqualTo("99px".length()); } @Test public void testCustomDeclaration() throws GssParserException { testTree(":root { --test: 10px; }", "[[:root]{[--test:[[10px]];]}]"); } @Test(expected=GssParserException.class) public void testNoValueShouldFailCustomDeclaration() throws GssParserException { testValid(":root { --var:; }"); // We expect 
this to throw } @Test public void testCustomPropertyReferenceInCalc() throws GssParserException { testValid("div { width: calc(10px * var(--test)); }"); } @Test public void testDefaultValue() throws GssParserException { testValid(".class { width: var(--test, 20px); }"); } @Test public void testCalcInVarDefaultValue() throws GssParserException { testValid(".class { width: var(--test, calc(100% - 20px)); }"); } private CssTree parse(List<SourceCode> sources) throws GssParserException { GssParser parser = new GssParser(sources); return parser.parse(); } private CssTree parse(String gss) throws GssParserException { return parse(ImmutableList.of(new SourceCode("test", gss))); } }
googleapis/google-cloud-java
36,212
java-monitoring/google-cloud-monitoring/src/main/java/com/google/cloud/monitoring/v3/stub/NotificationChannelServiceStubSettings.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.monitoring.v3.stub; import static com.google.cloud.monitoring.v3.NotificationChannelServiceClient.ListNotificationChannelDescriptorsPagedResponse; import static com.google.cloud.monitoring.v3.NotificationChannelServiceClient.ListNotificationChannelsPagedResponse; import com.google.api.core.ApiFunction; import com.google.api.core.ApiFuture; import com.google.api.core.ObsoleteApi; import com.google.api.gax.core.GaxProperties; import com.google.api.gax.core.GoogleCredentialsProvider; import com.google.api.gax.core.InstantiatingExecutorProvider; import com.google.api.gax.grpc.GaxGrpcProperties; import com.google.api.gax.grpc.GrpcTransportChannel; import com.google.api.gax.grpc.InstantiatingGrpcChannelProvider; import com.google.api.gax.retrying.RetrySettings; import com.google.api.gax.rpc.ApiCallContext; import com.google.api.gax.rpc.ApiClientHeaderProvider; import com.google.api.gax.rpc.ClientContext; import com.google.api.gax.rpc.PageContext; import com.google.api.gax.rpc.PagedCallSettings; import com.google.api.gax.rpc.PagedListDescriptor; import com.google.api.gax.rpc.PagedListResponseFactory; import com.google.api.gax.rpc.StatusCode; import com.google.api.gax.rpc.StubSettings; import com.google.api.gax.rpc.TransportChannelProvider; import com.google.api.gax.rpc.UnaryCallSettings; import com.google.api.gax.rpc.UnaryCallable; import com.google.common.collect.ImmutableList; 
import com.google.common.collect.ImmutableMap; import com.google.common.collect.ImmutableSet; import com.google.common.collect.Lists; import com.google.monitoring.v3.CreateNotificationChannelRequest; import com.google.monitoring.v3.DeleteNotificationChannelRequest; import com.google.monitoring.v3.GetNotificationChannelDescriptorRequest; import com.google.monitoring.v3.GetNotificationChannelRequest; import com.google.monitoring.v3.GetNotificationChannelVerificationCodeRequest; import com.google.monitoring.v3.GetNotificationChannelVerificationCodeResponse; import com.google.monitoring.v3.ListNotificationChannelDescriptorsRequest; import com.google.monitoring.v3.ListNotificationChannelDescriptorsResponse; import com.google.monitoring.v3.ListNotificationChannelsRequest; import com.google.monitoring.v3.ListNotificationChannelsResponse; import com.google.monitoring.v3.NotificationChannel; import com.google.monitoring.v3.NotificationChannelDescriptor; import com.google.monitoring.v3.SendNotificationChannelVerificationCodeRequest; import com.google.monitoring.v3.UpdateNotificationChannelRequest; import com.google.monitoring.v3.VerifyNotificationChannelRequest; import com.google.protobuf.Empty; import java.io.IOException; import java.time.Duration; import java.util.List; import javax.annotation.Generated; // AUTO-GENERATED DOCUMENTATION AND CLASS. /** * Settings class to configure an instance of {@link NotificationChannelServiceStub}. * * <p>The default instance has everything set to sensible defaults: * * <ul> * <li>The default service address (monitoring.googleapis.com) and default port (443) are used. * <li>Credentials are acquired automatically through Application Default Credentials. * <li>Retries are configured for idempotent methods but not for non-idempotent methods. * </ul> * * <p>The builder of this class is recursive, so contained classes are themselves builders. When * build() is called, the tree of builders is called to create the complete settings object. 
* * <p>For example, to set the * [RetrySettings](https://cloud.google.com/java/docs/reference/gax/latest/com.google.api.gax.retrying.RetrySettings) * of getNotificationChannelDescriptor: * * <pre>{@code * // This snippet has been automatically generated and should be regarded as a code template only. * // It will require modifications to work: * // - It may require correct/in-range values for request initialization. * // - It may require specifying regional endpoints when creating the service client as shown in * // https://cloud.google.com/java/docs/setup#configure_endpoints_for_the_client_library * NotificationChannelServiceStubSettings.Builder notificationChannelServiceSettingsBuilder = * NotificationChannelServiceStubSettings.newBuilder(); * notificationChannelServiceSettingsBuilder * .getNotificationChannelDescriptorSettings() * .setRetrySettings( * notificationChannelServiceSettingsBuilder * .getNotificationChannelDescriptorSettings() * .getRetrySettings() * .toBuilder() * .setInitialRetryDelayDuration(Duration.ofSeconds(1)) * .setInitialRpcTimeoutDuration(Duration.ofSeconds(5)) * .setMaxAttempts(5) * .setMaxRetryDelayDuration(Duration.ofSeconds(30)) * .setMaxRpcTimeoutDuration(Duration.ofSeconds(60)) * .setRetryDelayMultiplier(1.3) * .setRpcTimeoutMultiplier(1.5) * .setTotalTimeoutDuration(Duration.ofSeconds(300)) * .build()); * NotificationChannelServiceStubSettings notificationChannelServiceSettings = * notificationChannelServiceSettingsBuilder.build(); * }</pre> * * Please refer to the [Client Side Retry * Guide](https://github.com/googleapis/google-cloud-java/blob/main/docs/client_retries.md) for * additional support in setting retries. */ @Generated("by gapic-generator-java") public class NotificationChannelServiceStubSettings extends StubSettings<NotificationChannelServiceStubSettings> { /** The default scopes of the service. 
*/ private static final ImmutableList<String> DEFAULT_SERVICE_SCOPES = ImmutableList.<String>builder() .add("https://www.googleapis.com/auth/cloud-platform") .add("https://www.googleapis.com/auth/monitoring") .add("https://www.googleapis.com/auth/monitoring.read") .build(); private final PagedCallSettings< ListNotificationChannelDescriptorsRequest, ListNotificationChannelDescriptorsResponse, ListNotificationChannelDescriptorsPagedResponse> listNotificationChannelDescriptorsSettings; private final UnaryCallSettings< GetNotificationChannelDescriptorRequest, NotificationChannelDescriptor> getNotificationChannelDescriptorSettings; private final PagedCallSettings< ListNotificationChannelsRequest, ListNotificationChannelsResponse, ListNotificationChannelsPagedResponse> listNotificationChannelsSettings; private final UnaryCallSettings<GetNotificationChannelRequest, NotificationChannel> getNotificationChannelSettings; private final UnaryCallSettings<CreateNotificationChannelRequest, NotificationChannel> createNotificationChannelSettings; private final UnaryCallSettings<UpdateNotificationChannelRequest, NotificationChannel> updateNotificationChannelSettings; private final UnaryCallSettings<DeleteNotificationChannelRequest, Empty> deleteNotificationChannelSettings; private final UnaryCallSettings<SendNotificationChannelVerificationCodeRequest, Empty> sendNotificationChannelVerificationCodeSettings; private final UnaryCallSettings< GetNotificationChannelVerificationCodeRequest, GetNotificationChannelVerificationCodeResponse> getNotificationChannelVerificationCodeSettings; private final UnaryCallSettings<VerifyNotificationChannelRequest, NotificationChannel> verifyNotificationChannelSettings; private static final PagedListDescriptor< ListNotificationChannelDescriptorsRequest, ListNotificationChannelDescriptorsResponse, NotificationChannelDescriptor> LIST_NOTIFICATION_CHANNEL_DESCRIPTORS_PAGE_STR_DESC = new PagedListDescriptor< ListNotificationChannelDescriptorsRequest, 
ListNotificationChannelDescriptorsResponse, NotificationChannelDescriptor>() { @Override public String emptyToken() { return ""; } @Override public ListNotificationChannelDescriptorsRequest injectToken( ListNotificationChannelDescriptorsRequest payload, String token) { return ListNotificationChannelDescriptorsRequest.newBuilder(payload) .setPageToken(token) .build(); } @Override public ListNotificationChannelDescriptorsRequest injectPageSize( ListNotificationChannelDescriptorsRequest payload, int pageSize) { return ListNotificationChannelDescriptorsRequest.newBuilder(payload) .setPageSize(pageSize) .build(); } @Override public Integer extractPageSize(ListNotificationChannelDescriptorsRequest payload) { return payload.getPageSize(); } @Override public String extractNextToken(ListNotificationChannelDescriptorsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<NotificationChannelDescriptor> extractResources( ListNotificationChannelDescriptorsResponse payload) { return payload.getChannelDescriptorsList(); } }; private static final PagedListDescriptor< ListNotificationChannelsRequest, ListNotificationChannelsResponse, NotificationChannel> LIST_NOTIFICATION_CHANNELS_PAGE_STR_DESC = new PagedListDescriptor< ListNotificationChannelsRequest, ListNotificationChannelsResponse, NotificationChannel>() { @Override public String emptyToken() { return ""; } @Override public ListNotificationChannelsRequest injectToken( ListNotificationChannelsRequest payload, String token) { return ListNotificationChannelsRequest.newBuilder(payload) .setPageToken(token) .build(); } @Override public ListNotificationChannelsRequest injectPageSize( ListNotificationChannelsRequest payload, int pageSize) { return ListNotificationChannelsRequest.newBuilder(payload) .setPageSize(pageSize) .build(); } @Override public Integer extractPageSize(ListNotificationChannelsRequest payload) { return payload.getPageSize(); } @Override public String 
extractNextToken(ListNotificationChannelsResponse payload) { return payload.getNextPageToken(); } @Override public Iterable<NotificationChannel> extractResources( ListNotificationChannelsResponse payload) { return payload.getNotificationChannelsList(); } }; private static final PagedListResponseFactory< ListNotificationChannelDescriptorsRequest, ListNotificationChannelDescriptorsResponse, ListNotificationChannelDescriptorsPagedResponse> LIST_NOTIFICATION_CHANNEL_DESCRIPTORS_PAGE_STR_FACT = new PagedListResponseFactory< ListNotificationChannelDescriptorsRequest, ListNotificationChannelDescriptorsResponse, ListNotificationChannelDescriptorsPagedResponse>() { @Override public ApiFuture<ListNotificationChannelDescriptorsPagedResponse> getFuturePagedResponse( UnaryCallable< ListNotificationChannelDescriptorsRequest, ListNotificationChannelDescriptorsResponse> callable, ListNotificationChannelDescriptorsRequest request, ApiCallContext context, ApiFuture<ListNotificationChannelDescriptorsResponse> futureResponse) { PageContext< ListNotificationChannelDescriptorsRequest, ListNotificationChannelDescriptorsResponse, NotificationChannelDescriptor> pageContext = PageContext.create( callable, LIST_NOTIFICATION_CHANNEL_DESCRIPTORS_PAGE_STR_DESC, request, context); return ListNotificationChannelDescriptorsPagedResponse.createAsync( pageContext, futureResponse); } }; private static final PagedListResponseFactory< ListNotificationChannelsRequest, ListNotificationChannelsResponse, ListNotificationChannelsPagedResponse> LIST_NOTIFICATION_CHANNELS_PAGE_STR_FACT = new PagedListResponseFactory< ListNotificationChannelsRequest, ListNotificationChannelsResponse, ListNotificationChannelsPagedResponse>() { @Override public ApiFuture<ListNotificationChannelsPagedResponse> getFuturePagedResponse( UnaryCallable<ListNotificationChannelsRequest, ListNotificationChannelsResponse> callable, ListNotificationChannelsRequest request, ApiCallContext context, ApiFuture<ListNotificationChannelsResponse> 
futureResponse) { PageContext< ListNotificationChannelsRequest, ListNotificationChannelsResponse, NotificationChannel> pageContext = PageContext.create( callable, LIST_NOTIFICATION_CHANNELS_PAGE_STR_DESC, request, context); return ListNotificationChannelsPagedResponse.createAsync(pageContext, futureResponse); } }; /** Returns the object with the settings used for calls to listNotificationChannelDescriptors. */ public PagedCallSettings< ListNotificationChannelDescriptorsRequest, ListNotificationChannelDescriptorsResponse, ListNotificationChannelDescriptorsPagedResponse> listNotificationChannelDescriptorsSettings() { return listNotificationChannelDescriptorsSettings; } /** Returns the object with the settings used for calls to getNotificationChannelDescriptor. */ public UnaryCallSettings<GetNotificationChannelDescriptorRequest, NotificationChannelDescriptor> getNotificationChannelDescriptorSettings() { return getNotificationChannelDescriptorSettings; } /** Returns the object with the settings used for calls to listNotificationChannels. */ public PagedCallSettings< ListNotificationChannelsRequest, ListNotificationChannelsResponse, ListNotificationChannelsPagedResponse> listNotificationChannelsSettings() { return listNotificationChannelsSettings; } /** Returns the object with the settings used for calls to getNotificationChannel. */ public UnaryCallSettings<GetNotificationChannelRequest, NotificationChannel> getNotificationChannelSettings() { return getNotificationChannelSettings; } /** Returns the object with the settings used for calls to createNotificationChannel. */ public UnaryCallSettings<CreateNotificationChannelRequest, NotificationChannel> createNotificationChannelSettings() { return createNotificationChannelSettings; } /** Returns the object with the settings used for calls to updateNotificationChannel. 
*/ public UnaryCallSettings<UpdateNotificationChannelRequest, NotificationChannel> updateNotificationChannelSettings() { return updateNotificationChannelSettings; } /** Returns the object with the settings used for calls to deleteNotificationChannel. */ public UnaryCallSettings<DeleteNotificationChannelRequest, Empty> deleteNotificationChannelSettings() { return deleteNotificationChannelSettings; } /** * Returns the object with the settings used for calls to sendNotificationChannelVerificationCode. */ public UnaryCallSettings<SendNotificationChannelVerificationCodeRequest, Empty> sendNotificationChannelVerificationCodeSettings() { return sendNotificationChannelVerificationCodeSettings; } /** * Returns the object with the settings used for calls to getNotificationChannelVerificationCode. */ public UnaryCallSettings< GetNotificationChannelVerificationCodeRequest, GetNotificationChannelVerificationCodeResponse> getNotificationChannelVerificationCodeSettings() { return getNotificationChannelVerificationCodeSettings; } /** Returns the object with the settings used for calls to verifyNotificationChannel. */ public UnaryCallSettings<VerifyNotificationChannelRequest, NotificationChannel> verifyNotificationChannelSettings() { return verifyNotificationChannelSettings; } public NotificationChannelServiceStub createStub() throws IOException { if (getTransportChannelProvider() .getTransportName() .equals(GrpcTransportChannel.getGrpcTransportName())) { return GrpcNotificationChannelServiceStub.create(this); } throw new UnsupportedOperationException( String.format( "Transport not supported: %s", getTransportChannelProvider().getTransportName())); } /** Returns the default service name. */ @Override public String getServiceName() { return "monitoring"; } /** Returns a builder for the default ExecutorProvider for this service. 
*/ public static InstantiatingExecutorProvider.Builder defaultExecutorProviderBuilder() { return InstantiatingExecutorProvider.newBuilder(); } /** Returns the default service endpoint. */ @ObsoleteApi("Use getEndpoint() instead") public static String getDefaultEndpoint() { return "monitoring.googleapis.com:443"; } /** Returns the default mTLS service endpoint. */ public static String getDefaultMtlsEndpoint() { return "monitoring.mtls.googleapis.com:443"; } /** Returns the default service scopes. */ public static List<String> getDefaultServiceScopes() { return DEFAULT_SERVICE_SCOPES; } /** Returns a builder for the default credentials for this service. */ public static GoogleCredentialsProvider.Builder defaultCredentialsProviderBuilder() { return GoogleCredentialsProvider.newBuilder() .setScopesToApply(DEFAULT_SERVICE_SCOPES) .setUseJwtAccessWithScope(true); } /** Returns a builder for the default ChannelProvider for this service. */ public static InstantiatingGrpcChannelProvider.Builder defaultGrpcTransportProviderBuilder() { return InstantiatingGrpcChannelProvider.newBuilder() .setMaxInboundMessageSize(Integer.MAX_VALUE); } public static TransportChannelProvider defaultTransportChannelProvider() { return defaultGrpcTransportProviderBuilder().build(); } public static ApiClientHeaderProvider.Builder defaultApiClientHeaderProviderBuilder() { return ApiClientHeaderProvider.newBuilder() .setGeneratedLibToken( "gapic", GaxProperties.getLibraryVersion(NotificationChannelServiceStubSettings.class)) .setTransportToken( GaxGrpcProperties.getGrpcTokenName(), GaxGrpcProperties.getGrpcVersion()); } /** Returns a new builder for this class. */ public static Builder newBuilder() { return Builder.createDefault(); } /** Returns a new builder for this class. */ public static Builder newBuilder(ClientContext clientContext) { return new Builder(clientContext); } /** Returns a builder containing all the values of this settings class. 
*/ public Builder toBuilder() { return new Builder(this); } protected NotificationChannelServiceStubSettings(Builder settingsBuilder) throws IOException { super(settingsBuilder); listNotificationChannelDescriptorsSettings = settingsBuilder.listNotificationChannelDescriptorsSettings().build(); getNotificationChannelDescriptorSettings = settingsBuilder.getNotificationChannelDescriptorSettings().build(); listNotificationChannelsSettings = settingsBuilder.listNotificationChannelsSettings().build(); getNotificationChannelSettings = settingsBuilder.getNotificationChannelSettings().build(); createNotificationChannelSettings = settingsBuilder.createNotificationChannelSettings().build(); updateNotificationChannelSettings = settingsBuilder.updateNotificationChannelSettings().build(); deleteNotificationChannelSettings = settingsBuilder.deleteNotificationChannelSettings().build(); sendNotificationChannelVerificationCodeSettings = settingsBuilder.sendNotificationChannelVerificationCodeSettings().build(); getNotificationChannelVerificationCodeSettings = settingsBuilder.getNotificationChannelVerificationCodeSettings().build(); verifyNotificationChannelSettings = settingsBuilder.verifyNotificationChannelSettings().build(); } /** Builder for NotificationChannelServiceStubSettings. 
*/ public static class Builder extends StubSettings.Builder<NotificationChannelServiceStubSettings, Builder> { private final ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders; private final PagedCallSettings.Builder< ListNotificationChannelDescriptorsRequest, ListNotificationChannelDescriptorsResponse, ListNotificationChannelDescriptorsPagedResponse> listNotificationChannelDescriptorsSettings; private final UnaryCallSettings.Builder< GetNotificationChannelDescriptorRequest, NotificationChannelDescriptor> getNotificationChannelDescriptorSettings; private final PagedCallSettings.Builder< ListNotificationChannelsRequest, ListNotificationChannelsResponse, ListNotificationChannelsPagedResponse> listNotificationChannelsSettings; private final UnaryCallSettings.Builder<GetNotificationChannelRequest, NotificationChannel> getNotificationChannelSettings; private final UnaryCallSettings.Builder<CreateNotificationChannelRequest, NotificationChannel> createNotificationChannelSettings; private final UnaryCallSettings.Builder<UpdateNotificationChannelRequest, NotificationChannel> updateNotificationChannelSettings; private final UnaryCallSettings.Builder<DeleteNotificationChannelRequest, Empty> deleteNotificationChannelSettings; private final UnaryCallSettings.Builder<SendNotificationChannelVerificationCodeRequest, Empty> sendNotificationChannelVerificationCodeSettings; private final UnaryCallSettings.Builder< GetNotificationChannelVerificationCodeRequest, GetNotificationChannelVerificationCodeResponse> getNotificationChannelVerificationCodeSettings; private final UnaryCallSettings.Builder<VerifyNotificationChannelRequest, NotificationChannel> verifyNotificationChannelSettings; private static final ImmutableMap<String, ImmutableSet<StatusCode.Code>> RETRYABLE_CODE_DEFINITIONS; static { ImmutableMap.Builder<String, ImmutableSet<StatusCode.Code>> definitions = ImmutableMap.builder(); definitions.put( "retry_policy_5_codes", 
ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList(StatusCode.Code.UNAVAILABLE))); definitions.put( "no_retry_12_codes", ImmutableSet.copyOf(Lists.<StatusCode.Code>newArrayList())); RETRYABLE_CODE_DEFINITIONS = definitions.build(); } private static final ImmutableMap<String, RetrySettings> RETRY_PARAM_DEFINITIONS; static { ImmutableMap.Builder<String, RetrySettings> definitions = ImmutableMap.builder(); RetrySettings settings = null; settings = RetrySettings.newBuilder() .setInitialRetryDelayDuration(Duration.ofMillis(100L)) .setRetryDelayMultiplier(1.3) .setMaxRetryDelayDuration(Duration.ofMillis(30000L)) .setInitialRpcTimeoutDuration(Duration.ofMillis(30000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(30000L)) .setTotalTimeoutDuration(Duration.ofMillis(30000L)) .build(); definitions.put("retry_policy_5_params", settings); settings = RetrySettings.newBuilder() .setInitialRpcTimeoutDuration(Duration.ofMillis(30000L)) .setRpcTimeoutMultiplier(1.0) .setMaxRpcTimeoutDuration(Duration.ofMillis(30000L)) .setTotalTimeoutDuration(Duration.ofMillis(30000L)) .build(); definitions.put("no_retry_12_params", settings); RETRY_PARAM_DEFINITIONS = definitions.build(); } protected Builder() { this(((ClientContext) null)); } protected Builder(ClientContext clientContext) { super(clientContext); listNotificationChannelDescriptorsSettings = PagedCallSettings.newBuilder(LIST_NOTIFICATION_CHANNEL_DESCRIPTORS_PAGE_STR_FACT); getNotificationChannelDescriptorSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); listNotificationChannelsSettings = PagedCallSettings.newBuilder(LIST_NOTIFICATION_CHANNELS_PAGE_STR_FACT); getNotificationChannelSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); createNotificationChannelSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); updateNotificationChannelSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); deleteNotificationChannelSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); 
sendNotificationChannelVerificationCodeSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); getNotificationChannelVerificationCodeSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); verifyNotificationChannelSettings = UnaryCallSettings.newUnaryCallSettingsBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( listNotificationChannelDescriptorsSettings, getNotificationChannelDescriptorSettings, listNotificationChannelsSettings, getNotificationChannelSettings, createNotificationChannelSettings, updateNotificationChannelSettings, deleteNotificationChannelSettings, sendNotificationChannelVerificationCodeSettings, getNotificationChannelVerificationCodeSettings, verifyNotificationChannelSettings); initDefaults(this); } protected Builder(NotificationChannelServiceStubSettings settings) { super(settings); listNotificationChannelDescriptorsSettings = settings.listNotificationChannelDescriptorsSettings.toBuilder(); getNotificationChannelDescriptorSettings = settings.getNotificationChannelDescriptorSettings.toBuilder(); listNotificationChannelsSettings = settings.listNotificationChannelsSettings.toBuilder(); getNotificationChannelSettings = settings.getNotificationChannelSettings.toBuilder(); createNotificationChannelSettings = settings.createNotificationChannelSettings.toBuilder(); updateNotificationChannelSettings = settings.updateNotificationChannelSettings.toBuilder(); deleteNotificationChannelSettings = settings.deleteNotificationChannelSettings.toBuilder(); sendNotificationChannelVerificationCodeSettings = settings.sendNotificationChannelVerificationCodeSettings.toBuilder(); getNotificationChannelVerificationCodeSettings = settings.getNotificationChannelVerificationCodeSettings.toBuilder(); verifyNotificationChannelSettings = settings.verifyNotificationChannelSettings.toBuilder(); unaryMethodSettingsBuilders = ImmutableList.<UnaryCallSettings.Builder<?, ?>>of( listNotificationChannelDescriptorsSettings, 
getNotificationChannelDescriptorSettings, listNotificationChannelsSettings, getNotificationChannelSettings, createNotificationChannelSettings, updateNotificationChannelSettings, deleteNotificationChannelSettings, sendNotificationChannelVerificationCodeSettings, getNotificationChannelVerificationCodeSettings, verifyNotificationChannelSettings); } private static Builder createDefault() { Builder builder = new Builder(((ClientContext) null)); builder.setTransportChannelProvider(defaultTransportChannelProvider()); builder.setCredentialsProvider(defaultCredentialsProviderBuilder().build()); builder.setInternalHeaderProvider(defaultApiClientHeaderProviderBuilder().build()); builder.setMtlsEndpoint(getDefaultMtlsEndpoint()); builder.setSwitchToMtlsEndpointAllowed(true); return initDefaults(builder); } private static Builder initDefaults(Builder builder) { builder .listNotificationChannelDescriptorsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); builder .getNotificationChannelDescriptorSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); builder .listNotificationChannelsSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); builder .getNotificationChannelSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); builder .createNotificationChannelSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_12_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_12_params")); builder .updateNotificationChannelSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_12_codes")) 
.setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_12_params")); builder .deleteNotificationChannelSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); builder .sendNotificationChannelVerificationCodeSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("no_retry_12_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("no_retry_12_params")); builder .getNotificationChannelVerificationCodeSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); builder .verifyNotificationChannelSettings() .setRetryableCodes(RETRYABLE_CODE_DEFINITIONS.get("retry_policy_5_codes")) .setRetrySettings(RETRY_PARAM_DEFINITIONS.get("retry_policy_5_params")); return builder; } /** * Applies the given settings updater function to all of the unary API methods in this service. * * <p>Note: This method does not support applying settings to streaming methods. */ public Builder applyToAllUnaryMethods( ApiFunction<UnaryCallSettings.Builder<?, ?>, Void> settingsUpdater) { super.applyToAllUnaryMethods(unaryMethodSettingsBuilders, settingsUpdater); return this; } public ImmutableList<UnaryCallSettings.Builder<?, ?>> unaryMethodSettingsBuilders() { return unaryMethodSettingsBuilders; } /** * Returns the builder for the settings used for calls to listNotificationChannelDescriptors. */ public PagedCallSettings.Builder< ListNotificationChannelDescriptorsRequest, ListNotificationChannelDescriptorsResponse, ListNotificationChannelDescriptorsPagedResponse> listNotificationChannelDescriptorsSettings() { return listNotificationChannelDescriptorsSettings; } /** Returns the builder for the settings used for calls to getNotificationChannelDescriptor. 
*/ public UnaryCallSettings.Builder< GetNotificationChannelDescriptorRequest, NotificationChannelDescriptor> getNotificationChannelDescriptorSettings() { return getNotificationChannelDescriptorSettings; } /** Returns the builder for the settings used for calls to listNotificationChannels. */ public PagedCallSettings.Builder< ListNotificationChannelsRequest, ListNotificationChannelsResponse, ListNotificationChannelsPagedResponse> listNotificationChannelsSettings() { return listNotificationChannelsSettings; } /** Returns the builder for the settings used for calls to getNotificationChannel. */ public UnaryCallSettings.Builder<GetNotificationChannelRequest, NotificationChannel> getNotificationChannelSettings() { return getNotificationChannelSettings; } /** Returns the builder for the settings used for calls to createNotificationChannel. */ public UnaryCallSettings.Builder<CreateNotificationChannelRequest, NotificationChannel> createNotificationChannelSettings() { return createNotificationChannelSettings; } /** Returns the builder for the settings used for calls to updateNotificationChannel. */ public UnaryCallSettings.Builder<UpdateNotificationChannelRequest, NotificationChannel> updateNotificationChannelSettings() { return updateNotificationChannelSettings; } /** Returns the builder for the settings used for calls to deleteNotificationChannel. */ public UnaryCallSettings.Builder<DeleteNotificationChannelRequest, Empty> deleteNotificationChannelSettings() { return deleteNotificationChannelSettings; } /** * Returns the builder for the settings used for calls to * sendNotificationChannelVerificationCode. */ public UnaryCallSettings.Builder<SendNotificationChannelVerificationCodeRequest, Empty> sendNotificationChannelVerificationCodeSettings() { return sendNotificationChannelVerificationCodeSettings; } /** * Returns the builder for the settings used for calls to * getNotificationChannelVerificationCode. 
*/ public UnaryCallSettings.Builder< GetNotificationChannelVerificationCodeRequest, GetNotificationChannelVerificationCodeResponse> getNotificationChannelVerificationCodeSettings() { return getNotificationChannelVerificationCodeSettings; } /** Returns the builder for the settings used for calls to verifyNotificationChannel. */ public UnaryCallSettings.Builder<VerifyNotificationChannelRequest, NotificationChannel> verifyNotificationChannelSettings() { return verifyNotificationChannelSettings; } @Override public NotificationChannelServiceStubSettings build() throws IOException { return new NotificationChannelServiceStubSettings(this); } } }
googleapis/google-cloud-java
35,914
java-grafeas/src/main/java/io/grafeas/v1/UpdateOccurrenceRequest.java
/* * Copyright 2025 The Grafeas Authors. All rights reserved. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: grafeas/v1/grafeas.proto // Protobuf Java Version: 3.25.8 package io.grafeas.v1; /** * * * <pre> * Request to update an occurrence. * </pre> * * Protobuf type {@code grafeas.v1.UpdateOccurrenceRequest} */ public final class UpdateOccurrenceRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:grafeas.v1.UpdateOccurrenceRequest) UpdateOccurrenceRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateOccurrenceRequest.newBuilder() to construct. 
private UpdateOccurrenceRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateOccurrenceRequest() { name_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateOccurrenceRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1.GrafeasOuterClass .internal_static_grafeas_v1_UpdateOccurrenceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1.GrafeasOuterClass .internal_static_grafeas_v1_UpdateOccurrenceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1.UpdateOccurrenceRequest.class, io.grafeas.v1.UpdateOccurrenceRequest.Builder.class); } private int bitField0_; public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * The name of the occurrence in the form of * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * The name of the occurrence in the form of * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int OCCURRENCE_FIELD_NUMBER = 2; private io.grafeas.v1.Occurrence occurrence_; /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1.Occurrence occurrence = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the occurrence field is set. */ @java.lang.Override public boolean hasOccurrence() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1.Occurrence occurrence = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The occurrence. */ @java.lang.Override public io.grafeas.v1.Occurrence getOccurrence() { return occurrence_ == null ? io.grafeas.v1.Occurrence.getDefaultInstance() : occurrence_; } /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1.Occurrence occurrence = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ @java.lang.Override public io.grafeas.v1.OccurrenceOrBuilder getOccurrenceOrBuilder() { return occurrence_ == null ? io.grafeas.v1.Occurrence.getDefaultInstance() : occurrence_; } public static final int UPDATE_MASK_FIELD_NUMBER = 3; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> * * @return The updateMask. 
*/ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getOccurrence()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(3, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getOccurrence()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof io.grafeas.v1.UpdateOccurrenceRequest)) { return super.equals(obj); } 
io.grafeas.v1.UpdateOccurrenceRequest other = (io.grafeas.v1.UpdateOccurrenceRequest) obj; if (!getName().equals(other.getName())) return false; if (hasOccurrence() != other.hasOccurrence()) return false; if (hasOccurrence()) { if (!getOccurrence().equals(other.getOccurrence())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); if (hasOccurrence()) { hash = (37 * hash) + OCCURRENCE_FIELD_NUMBER; hash = (53 * hash) + getOccurrence().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static io.grafeas.v1.UpdateOccurrenceRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1.UpdateOccurrenceRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1.UpdateOccurrenceRequest parseFrom(com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1.UpdateOccurrenceRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, 
extensionRegistry); } public static io.grafeas.v1.UpdateOccurrenceRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static io.grafeas.v1.UpdateOccurrenceRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static io.grafeas.v1.UpdateOccurrenceRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1.UpdateOccurrenceRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1.UpdateOccurrenceRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static io.grafeas.v1.UpdateOccurrenceRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static io.grafeas.v1.UpdateOccurrenceRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static io.grafeas.v1.UpdateOccurrenceRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override 
public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(io.grafeas.v1.UpdateOccurrenceRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request to update an occurrence. * </pre> * * Protobuf type {@code grafeas.v1.UpdateOccurrenceRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:grafeas.v1.UpdateOccurrenceRequest) io.grafeas.v1.UpdateOccurrenceRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return io.grafeas.v1.GrafeasOuterClass .internal_static_grafeas_v1_UpdateOccurrenceRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return io.grafeas.v1.GrafeasOuterClass .internal_static_grafeas_v1_UpdateOccurrenceRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( io.grafeas.v1.UpdateOccurrenceRequest.class, io.grafeas.v1.UpdateOccurrenceRequest.Builder.class); } // Construct using io.grafeas.v1.UpdateOccurrenceRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getOccurrenceFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { 
super.clear(); bitField0_ = 0; name_ = ""; occurrence_ = null; if (occurrenceBuilder_ != null) { occurrenceBuilder_.dispose(); occurrenceBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return io.grafeas.v1.GrafeasOuterClass .internal_static_grafeas_v1_UpdateOccurrenceRequest_descriptor; } @java.lang.Override public io.grafeas.v1.UpdateOccurrenceRequest getDefaultInstanceForType() { return io.grafeas.v1.UpdateOccurrenceRequest.getDefaultInstance(); } @java.lang.Override public io.grafeas.v1.UpdateOccurrenceRequest build() { io.grafeas.v1.UpdateOccurrenceRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public io.grafeas.v1.UpdateOccurrenceRequest buildPartial() { io.grafeas.v1.UpdateOccurrenceRequest result = new io.grafeas.v1.UpdateOccurrenceRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(io.grafeas.v1.UpdateOccurrenceRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000002) != 0)) { result.occurrence_ = occurrenceBuilder_ == null ? occurrence_ : occurrenceBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000004) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof io.grafeas.v1.UpdateOccurrenceRequest) { return mergeFrom((io.grafeas.v1.UpdateOccurrenceRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(io.grafeas.v1.UpdateOccurrenceRequest other) { if (other == io.grafeas.v1.UpdateOccurrenceRequest.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (other.hasOccurrence()) { mergeOccurrence(other.getOccurrence()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if 
(extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getOccurrenceFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 case 26: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000004; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * The name of the occurrence in the form of * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The name of the occurrence in the form of * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. 
*/ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The name of the occurrence in the form of * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The name of the occurrence in the form of * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The name of the occurrence in the form of * `projects/[PROJECT_ID]/occurrences/[OCCURRENCE_ID]`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. 
*/ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private io.grafeas.v1.Occurrence occurrence_; private com.google.protobuf.SingleFieldBuilderV3< io.grafeas.v1.Occurrence, io.grafeas.v1.Occurrence.Builder, io.grafeas.v1.OccurrenceOrBuilder> occurrenceBuilder_; /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1.Occurrence occurrence = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return Whether the occurrence field is set. */ public boolean hasOccurrence() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1.Occurrence occurrence = 2 [(.google.api.field_behavior) = REQUIRED];</code> * * @return The occurrence. */ public io.grafeas.v1.Occurrence getOccurrence() { if (occurrenceBuilder_ == null) { return occurrence_ == null ? io.grafeas.v1.Occurrence.getDefaultInstance() : occurrence_; } else { return occurrenceBuilder_.getMessage(); } } /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1.Occurrence occurrence = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setOccurrence(io.grafeas.v1.Occurrence value) { if (occurrenceBuilder_ == null) { if (value == null) { throw new NullPointerException(); } occurrence_ = value; } else { occurrenceBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The updated occurrence. 
* </pre> * * <code>.grafeas.v1.Occurrence occurrence = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder setOccurrence(io.grafeas.v1.Occurrence.Builder builderForValue) { if (occurrenceBuilder_ == null) { occurrence_ = builderForValue.build(); } else { occurrenceBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1.Occurrence occurrence = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder mergeOccurrence(io.grafeas.v1.Occurrence value) { if (occurrenceBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && occurrence_ != null && occurrence_ != io.grafeas.v1.Occurrence.getDefaultInstance()) { getOccurrenceBuilder().mergeFrom(value); } else { occurrence_ = value; } } else { occurrenceBuilder_.mergeFrom(value); } if (occurrence_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1.Occurrence occurrence = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public Builder clearOccurrence() { bitField0_ = (bitField0_ & ~0x00000002); occurrence_ = null; if (occurrenceBuilder_ != null) { occurrenceBuilder_.dispose(); occurrenceBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1.Occurrence occurrence = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public io.grafeas.v1.Occurrence.Builder getOccurrenceBuilder() { bitField0_ |= 0x00000002; onChanged(); return getOccurrenceFieldBuilder().getBuilder(); } /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1.Occurrence occurrence = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ public io.grafeas.v1.OccurrenceOrBuilder getOccurrenceOrBuilder() { if (occurrenceBuilder_ != null) { return occurrenceBuilder_.getMessageOrBuilder(); } else { return occurrence_ == null ? 
io.grafeas.v1.Occurrence.getDefaultInstance() : occurrence_; } } /** * * * <pre> * The updated occurrence. * </pre> * * <code>.grafeas.v1.Occurrence occurrence = 2 [(.google.api.field_behavior) = REQUIRED];</code> */ private com.google.protobuf.SingleFieldBuilderV3< io.grafeas.v1.Occurrence, io.grafeas.v1.Occurrence.Builder, io.grafeas.v1.OccurrenceOrBuilder> getOccurrenceFieldBuilder() { if (occurrenceBuilder_ == null) { occurrenceBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< io.grafeas.v1.Occurrence, io.grafeas.v1.Occurrence.Builder, io.grafeas.v1.OccurrenceOrBuilder>( getOccurrence(), getParentForChildren(), isClean()); occurrence_ = null; } return occurrenceBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000004) != 0); } /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000004) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000004; onChanged(); } return this; } /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000004); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000004; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * The fields to update. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * The fields to update. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 3;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:grafeas.v1.UpdateOccurrenceRequest) } // @@protoc_insertion_point(class_scope:grafeas.v1.UpdateOccurrenceRequest) private static final io.grafeas.v1.UpdateOccurrenceRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new io.grafeas.v1.UpdateOccurrenceRequest(); } public static io.grafeas.v1.UpdateOccurrenceRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateOccurrenceRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateOccurrenceRequest>() { @java.lang.Override public UpdateOccurrenceRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw 
e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateOccurrenceRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateOccurrenceRequest> getParserForType() { return PARSER; } @java.lang.Override public io.grafeas.v1.UpdateOccurrenceRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
// NOTE(review): The three lines below are dataset-join metadata (repo id, file size, file path),
// not Java source. They mark the boundary where a second generated file was concatenated onto
// this one and would break compilation if left bare; preserved here as a comment.
//   repo:  googleapis/google-cloud-java
//   size:  35,948
//   path:  java-discoveryengine/proto-google-cloud-discoveryengine-v1alpha/src/main/java/com/google/cloud/discoveryengine/v1alpha/UpdateSessionRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/discoveryengine/v1alpha/conversational_search_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.discoveryengine.v1alpha; /** * * * <pre> * Request for UpdateSession method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.UpdateSessionRequest} */ public final class UpdateSessionRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.discoveryengine.v1alpha.UpdateSessionRequest) UpdateSessionRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateSessionRequest.newBuilder() to construct. 
private UpdateSessionRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateSessionRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateSessionRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_UpdateSessionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_UpdateSessionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest.class, com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest.Builder.class); } private int bitField0_; public static final int SESSION_FIELD_NUMBER = 1; private com.google.cloud.discoveryengine.v1alpha.Session session_; /** * * * <pre> * Required. The Session to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the session field is set. */ @java.lang.Override public boolean hasSession() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Session to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The session. */ @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.Session getSession() { return session_ == null ? com.google.cloud.discoveryengine.v1alpha.Session.getDefaultInstance() : session_; } /** * * * <pre> * Required. The Session to update. 
* </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.SessionOrBuilder getSessionOrBuilder() { return session_ == null ? com.google.cloud.discoveryengine.v1alpha.Session.getDefaultInstance() : session_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1alpha.Session] to update. The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1alpha.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1alpha.Session] to update. The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1alpha.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1alpha.Session] to update. The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1alpha.Session.name] * * If not set or empty, all supported fields are updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getSession()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getSession()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest)) { return super.equals(obj); } com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest other = (com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest) obj; if (hasSession() != other.hasSession()) return false; if (hasSession()) { if (!getSession().equals(other.getSession())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) 
return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasSession()) { hash = (37 * hash) + SESSION_FIELD_NUMBER; hash = (53 * hash) + getSession().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public 
static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } 
@java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for UpdateSession method. * </pre> * * Protobuf type {@code google.cloud.discoveryengine.v1alpha.UpdateSessionRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.discoveryengine.v1alpha.UpdateSessionRequest) com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_UpdateSessionRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_UpdateSessionRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest.class, com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest.Builder.class); } // Construct using com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getSessionFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); 
bitField0_ = 0; session_ = null; if (sessionBuilder_ != null) { sessionBuilder_.dispose(); sessionBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.discoveryengine.v1alpha.ConversationalSearchServiceProto .internal_static_google_cloud_discoveryengine_v1alpha_UpdateSessionRequest_descriptor; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest getDefaultInstanceForType() { return com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest build() { com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest buildPartial() { com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest result = new com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.session_ = sessionBuilder_ == null ? session_ : sessionBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest) { return mergeFrom((com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest other) { if (other == com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest.getDefaultInstance()) return this; if (other.hasSession()) { mergeSession(other.getSession()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry 
== null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getSessionFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.discoveryengine.v1alpha.Session session_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1alpha.Session, com.google.cloud.discoveryengine.v1alpha.Session.Builder, com.google.cloud.discoveryengine.v1alpha.SessionOrBuilder> sessionBuilder_; /** * * * <pre> * Required. The Session to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the session field is set. */ public boolean hasSession() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The Session to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The session. */ public com.google.cloud.discoveryengine.v1alpha.Session getSession() { if (sessionBuilder_ == null) { return session_ == null ? com.google.cloud.discoveryengine.v1alpha.Session.getDefaultInstance() : session_; } else { return sessionBuilder_.getMessage(); } } /** * * * <pre> * Required. The Session to update. 
* </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setSession(com.google.cloud.discoveryengine.v1alpha.Session value) { if (sessionBuilder_ == null) { if (value == null) { throw new NullPointerException(); } session_ = value; } else { sessionBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Session to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setSession( com.google.cloud.discoveryengine.v1alpha.Session.Builder builderForValue) { if (sessionBuilder_ == null) { session_ = builderForValue.build(); } else { sessionBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The Session to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeSession(com.google.cloud.discoveryengine.v1alpha.Session value) { if (sessionBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && session_ != null && session_ != com.google.cloud.discoveryengine.v1alpha.Session.getDefaultInstance()) { getSessionBuilder().mergeFrom(value); } else { session_ = value; } } else { sessionBuilder_.mergeFrom(value); } if (session_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The Session to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearSession() { bitField0_ = (bitField0_ & ~0x00000001); session_ = null; if (sessionBuilder_ != null) { sessionBuilder_.dispose(); sessionBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The Session to update. 
* </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.discoveryengine.v1alpha.Session.Builder getSessionBuilder() { bitField0_ |= 0x00000001; onChanged(); return getSessionFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The Session to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.discoveryengine.v1alpha.SessionOrBuilder getSessionOrBuilder() { if (sessionBuilder_ != null) { return sessionBuilder_.getMessageOrBuilder(); } else { return session_ == null ? com.google.cloud.discoveryengine.v1alpha.Session.getDefaultInstance() : session_; } } /** * * * <pre> * Required. The Session to update. * </pre> * * <code> * .google.cloud.discoveryengine.v1alpha.Session session = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1alpha.Session, com.google.cloud.discoveryengine.v1alpha.Session.Builder, com.google.cloud.discoveryengine.v1alpha.SessionOrBuilder> getSessionFieldBuilder() { if (sessionBuilder_ == null) { sessionBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.discoveryengine.v1alpha.Session, com.google.cloud.discoveryengine.v1alpha.Session.Builder, com.google.cloud.discoveryengine.v1alpha.SessionOrBuilder>( getSession(), getParentForChildren(), isClean()); session_ = null; } return sessionBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1alpha.Session] to update. 
The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1alpha.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1alpha.Session] to update. The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1alpha.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1alpha.Session] to update. The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1alpha.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1alpha.Session] to update. The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1alpha.Session.name] * * If not set or empty, all supported fields are updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1alpha.Session] to update. The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1alpha.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1alpha.Session] to update. The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1alpha.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1alpha.Session] to update. 
The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1alpha.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1alpha.Session] to update. The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1alpha.Session.name] * * If not set or empty, all supported fields are updated. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Indicates which fields in the provided * [Session][google.cloud.discoveryengine.v1alpha.Session] to update. The * following are NOT supported: * * * [Session.name][google.cloud.discoveryengine.v1alpha.Session.name] * * If not set or empty, all supported fields are updated. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.discoveryengine.v1alpha.UpdateSessionRequest) } // @@protoc_insertion_point(class_scope:google.cloud.discoveryengine.v1alpha.UpdateSessionRequest) private static final com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest(); } public static com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateSessionRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateSessionRequest>() { @java.lang.Override public UpdateSessionRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw 
e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateSessionRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateSessionRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.discoveryengine.v1alpha.UpdateSessionRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hive
36,069
ql/src/gen/thrift/gen-javabean/org/apache/hadoop/hive/ql/plan/api/Operator.java
/** * Autogenerated by Thrift Compiler (0.16.0) * * DO NOT EDIT UNLESS YOU ARE SURE THAT YOU KNOW WHAT YOU ARE DOING * @generated */ package org.apache.hadoop.hive.ql.plan.api; @SuppressWarnings({"cast", "rawtypes", "serial", "unchecked", "unused"}) @javax.annotation.Generated(value = "Autogenerated by Thrift Compiler (0.16.0)") public class Operator implements org.apache.thrift.TBase<Operator, Operator._Fields>, java.io.Serializable, Cloneable, Comparable<Operator> { private static final org.apache.thrift.protocol.TStruct STRUCT_DESC = new org.apache.thrift.protocol.TStruct("Operator"); private static final org.apache.thrift.protocol.TField OPERATOR_ID_FIELD_DESC = new org.apache.thrift.protocol.TField("operatorId", org.apache.thrift.protocol.TType.STRING, (short)1); private static final org.apache.thrift.protocol.TField OPERATOR_TYPE_FIELD_DESC = new org.apache.thrift.protocol.TField("operatorType", org.apache.thrift.protocol.TType.I32, (short)2); private static final org.apache.thrift.protocol.TField OPERATOR_ATTRIBUTES_FIELD_DESC = new org.apache.thrift.protocol.TField("operatorAttributes", org.apache.thrift.protocol.TType.MAP, (short)3); private static final org.apache.thrift.protocol.TField OPERATOR_COUNTERS_FIELD_DESC = new org.apache.thrift.protocol.TField("operatorCounters", org.apache.thrift.protocol.TType.MAP, (short)4); private static final org.apache.thrift.protocol.TField DONE_FIELD_DESC = new org.apache.thrift.protocol.TField("done", org.apache.thrift.protocol.TType.BOOL, (short)5); private static final org.apache.thrift.protocol.TField STARTED_FIELD_DESC = new org.apache.thrift.protocol.TField("started", org.apache.thrift.protocol.TType.BOOL, (short)6); private static final org.apache.thrift.scheme.SchemeFactory STANDARD_SCHEME_FACTORY = new OperatorStandardSchemeFactory(); private static final org.apache.thrift.scheme.SchemeFactory TUPLE_SCHEME_FACTORY = new OperatorTupleSchemeFactory(); private @org.apache.thrift.annotation.Nullable 
java.lang.String operatorId; // required private @org.apache.thrift.annotation.Nullable OperatorType operatorType; // required private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> operatorAttributes; // required private @org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.Long> operatorCounters; // required private boolean done; // required private boolean started; // required /** The set of fields this struct contains, along with convenience methods for finding and manipulating them. */ public enum _Fields implements org.apache.thrift.TFieldIdEnum { OPERATOR_ID((short)1, "operatorId"), /** * * @see OperatorType */ OPERATOR_TYPE((short)2, "operatorType"), OPERATOR_ATTRIBUTES((short)3, "operatorAttributes"), OPERATOR_COUNTERS((short)4, "operatorCounters"), DONE((short)5, "done"), STARTED((short)6, "started"); private static final java.util.Map<java.lang.String, _Fields> byName = new java.util.HashMap<java.lang.String, _Fields>(); static { for (_Fields field : java.util.EnumSet.allOf(_Fields.class)) { byName.put(field.getFieldName(), field); } } /** * Find the _Fields constant that matches fieldId, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByThriftId(int fieldId) { switch(fieldId) { case 1: // OPERATOR_ID return OPERATOR_ID; case 2: // OPERATOR_TYPE return OPERATOR_TYPE; case 3: // OPERATOR_ATTRIBUTES return OPERATOR_ATTRIBUTES; case 4: // OPERATOR_COUNTERS return OPERATOR_COUNTERS; case 5: // DONE return DONE; case 6: // STARTED return STARTED; default: return null; } } /** * Find the _Fields constant that matches fieldId, throwing an exception * if it is not found. 
*/ public static _Fields findByThriftIdOrThrow(int fieldId) { _Fields fields = findByThriftId(fieldId); if (fields == null) throw new java.lang.IllegalArgumentException("Field " + fieldId + " doesn't exist!"); return fields; } /** * Find the _Fields constant that matches name, or null if its not found. */ @org.apache.thrift.annotation.Nullable public static _Fields findByName(java.lang.String name) { return byName.get(name); } private final short _thriftId; private final java.lang.String _fieldName; _Fields(short thriftId, java.lang.String fieldName) { _thriftId = thriftId; _fieldName = fieldName; } public short getThriftFieldId() { return _thriftId; } public java.lang.String getFieldName() { return _fieldName; } } // isset id assignments private static final int __DONE_ISSET_ID = 0; private static final int __STARTED_ISSET_ID = 1; private byte __isset_bitfield = 0; public static final java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> metaDataMap; static { java.util.Map<_Fields, org.apache.thrift.meta_data.FieldMetaData> tmpMap = new java.util.EnumMap<_Fields, org.apache.thrift.meta_data.FieldMetaData>(_Fields.class); tmpMap.put(_Fields.OPERATOR_ID, new org.apache.thrift.meta_data.FieldMetaData("operatorId", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING))); tmpMap.put(_Fields.OPERATOR_TYPE, new org.apache.thrift.meta_data.FieldMetaData("operatorType", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.EnumMetaData(org.apache.thrift.protocol.TType.ENUM, OperatorType.class))); tmpMap.put(_Fields.OPERATOR_ATTRIBUTES, new org.apache.thrift.meta_data.FieldMetaData("operatorAttributes", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), new 
org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING)))); tmpMap.put(_Fields.OPERATOR_COUNTERS, new org.apache.thrift.meta_data.FieldMetaData("operatorCounters", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.MapMetaData(org.apache.thrift.protocol.TType.MAP, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.STRING), new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.I64)))); tmpMap.put(_Fields.DONE, new org.apache.thrift.meta_data.FieldMetaData("done", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); tmpMap.put(_Fields.STARTED, new org.apache.thrift.meta_data.FieldMetaData("started", org.apache.thrift.TFieldRequirementType.DEFAULT, new org.apache.thrift.meta_data.FieldValueMetaData(org.apache.thrift.protocol.TType.BOOL))); metaDataMap = java.util.Collections.unmodifiableMap(tmpMap); org.apache.thrift.meta_data.FieldMetaData.addStructMetaDataMap(Operator.class, metaDataMap); } public Operator() { } public Operator( java.lang.String operatorId, OperatorType operatorType, java.util.Map<java.lang.String,java.lang.String> operatorAttributes, java.util.Map<java.lang.String,java.lang.Long> operatorCounters, boolean done, boolean started) { this(); this.operatorId = operatorId; this.operatorType = operatorType; this.operatorAttributes = operatorAttributes; this.operatorCounters = operatorCounters; this.done = done; setDoneIsSet(true); this.started = started; setStartedIsSet(true); } /** * Performs a deep copy on <i>other</i>. 
*/ public Operator(Operator other) { __isset_bitfield = other.__isset_bitfield; if (other.isSetOperatorId()) { this.operatorId = other.operatorId; } if (other.isSetOperatorType()) { this.operatorType = other.operatorType; } if (other.isSetOperatorAttributes()) { java.util.Map<java.lang.String,java.lang.String> __this__operatorAttributes = new java.util.HashMap<java.lang.String,java.lang.String>(other.operatorAttributes); this.operatorAttributes = __this__operatorAttributes; } if (other.isSetOperatorCounters()) { java.util.Map<java.lang.String,java.lang.Long> __this__operatorCounters = new java.util.HashMap<java.lang.String,java.lang.Long>(other.operatorCounters); this.operatorCounters = __this__operatorCounters; } this.done = other.done; this.started = other.started; } public Operator deepCopy() { return new Operator(this); } @Override public void clear() { this.operatorId = null; this.operatorType = null; this.operatorAttributes = null; this.operatorCounters = null; setDoneIsSet(false); this.done = false; setStartedIsSet(false); this.started = false; } @org.apache.thrift.annotation.Nullable public java.lang.String getOperatorId() { return this.operatorId; } public void setOperatorId(@org.apache.thrift.annotation.Nullable java.lang.String operatorId) { this.operatorId = operatorId; } public void unsetOperatorId() { this.operatorId = null; } /** Returns true if field operatorId is set (has been assigned a value) and false otherwise */ public boolean isSetOperatorId() { return this.operatorId != null; } public void setOperatorIdIsSet(boolean value) { if (!value) { this.operatorId = null; } } /** * * @see OperatorType */ @org.apache.thrift.annotation.Nullable public OperatorType getOperatorType() { return this.operatorType; } /** * * @see OperatorType */ public void setOperatorType(@org.apache.thrift.annotation.Nullable OperatorType operatorType) { this.operatorType = operatorType; } public void unsetOperatorType() { this.operatorType = null; } /** Returns true if 
field operatorType is set (has been assigned a value) and false otherwise */ public boolean isSetOperatorType() { return this.operatorType != null; } public void setOperatorTypeIsSet(boolean value) { if (!value) { this.operatorType = null; } } public int getOperatorAttributesSize() { return (this.operatorAttributes == null) ? 0 : this.operatorAttributes.size(); } public void putToOperatorAttributes(java.lang.String key, java.lang.String val) { if (this.operatorAttributes == null) { this.operatorAttributes = new java.util.HashMap<java.lang.String,java.lang.String>(); } this.operatorAttributes.put(key, val); } @org.apache.thrift.annotation.Nullable public java.util.Map<java.lang.String,java.lang.String> getOperatorAttributes() { return this.operatorAttributes; } public void setOperatorAttributes(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.String> operatorAttributes) { this.operatorAttributes = operatorAttributes; } public void unsetOperatorAttributes() { this.operatorAttributes = null; } /** Returns true if field operatorAttributes is set (has been assigned a value) and false otherwise */ public boolean isSetOperatorAttributes() { return this.operatorAttributes != null; } public void setOperatorAttributesIsSet(boolean value) { if (!value) { this.operatorAttributes = null; } } public int getOperatorCountersSize() { return (this.operatorCounters == null) ? 
0 : this.operatorCounters.size(); } public void putToOperatorCounters(java.lang.String key, long val) { if (this.operatorCounters == null) { this.operatorCounters = new java.util.HashMap<java.lang.String,java.lang.Long>(); } this.operatorCounters.put(key, val); } @org.apache.thrift.annotation.Nullable public java.util.Map<java.lang.String,java.lang.Long> getOperatorCounters() { return this.operatorCounters; } public void setOperatorCounters(@org.apache.thrift.annotation.Nullable java.util.Map<java.lang.String,java.lang.Long> operatorCounters) { this.operatorCounters = operatorCounters; } public void unsetOperatorCounters() { this.operatorCounters = null; } /** Returns true if field operatorCounters is set (has been assigned a value) and false otherwise */ public boolean isSetOperatorCounters() { return this.operatorCounters != null; } public void setOperatorCountersIsSet(boolean value) { if (!value) { this.operatorCounters = null; } } public boolean isDone() { return this.done; } public void setDone(boolean done) { this.done = done; setDoneIsSet(true); } public void unsetDone() { __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __DONE_ISSET_ID); } /** Returns true if field done is set (has been assigned a value) and false otherwise */ public boolean isSetDone() { return org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __DONE_ISSET_ID); } public void setDoneIsSet(boolean value) { __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __DONE_ISSET_ID, value); } public boolean isStarted() { return this.started; } public void setStarted(boolean started) { this.started = started; setStartedIsSet(true); } public void unsetStarted() { __isset_bitfield = org.apache.thrift.EncodingUtils.clearBit(__isset_bitfield, __STARTED_ISSET_ID); } /** Returns true if field started is set (has been assigned a value) and false otherwise */ public boolean isSetStarted() { return 
org.apache.thrift.EncodingUtils.testBit(__isset_bitfield, __STARTED_ISSET_ID); } public void setStartedIsSet(boolean value) { __isset_bitfield = org.apache.thrift.EncodingUtils.setBit(__isset_bitfield, __STARTED_ISSET_ID, value); } public void setFieldValue(_Fields field, @org.apache.thrift.annotation.Nullable java.lang.Object value) { switch (field) { case OPERATOR_ID: if (value == null) { unsetOperatorId(); } else { setOperatorId((java.lang.String)value); } break; case OPERATOR_TYPE: if (value == null) { unsetOperatorType(); } else { setOperatorType((OperatorType)value); } break; case OPERATOR_ATTRIBUTES: if (value == null) { unsetOperatorAttributes(); } else { setOperatorAttributes((java.util.Map<java.lang.String,java.lang.String>)value); } break; case OPERATOR_COUNTERS: if (value == null) { unsetOperatorCounters(); } else { setOperatorCounters((java.util.Map<java.lang.String,java.lang.Long>)value); } break; case DONE: if (value == null) { unsetDone(); } else { setDone((java.lang.Boolean)value); } break; case STARTED: if (value == null) { unsetStarted(); } else { setStarted((java.lang.Boolean)value); } break; } } @org.apache.thrift.annotation.Nullable public java.lang.Object getFieldValue(_Fields field) { switch (field) { case OPERATOR_ID: return getOperatorId(); case OPERATOR_TYPE: return getOperatorType(); case OPERATOR_ATTRIBUTES: return getOperatorAttributes(); case OPERATOR_COUNTERS: return getOperatorCounters(); case DONE: return isDone(); case STARTED: return isStarted(); } throw new java.lang.IllegalStateException(); } /** Returns true if field corresponding to fieldID is set (has been assigned a value) and false otherwise */ public boolean isSet(_Fields field) { if (field == null) { throw new java.lang.IllegalArgumentException(); } switch (field) { case OPERATOR_ID: return isSetOperatorId(); case OPERATOR_TYPE: return isSetOperatorType(); case OPERATOR_ATTRIBUTES: return isSetOperatorAttributes(); case OPERATOR_COUNTERS: return isSetOperatorCounters(); 
case DONE: return isSetDone(); case STARTED: return isSetStarted(); } throw new java.lang.IllegalStateException(); } @Override public boolean equals(java.lang.Object that) { if (that instanceof Operator) return this.equals((Operator)that); return false; } public boolean equals(Operator that) { if (that == null) return false; if (this == that) return true; boolean this_present_operatorId = true && this.isSetOperatorId(); boolean that_present_operatorId = true && that.isSetOperatorId(); if (this_present_operatorId || that_present_operatorId) { if (!(this_present_operatorId && that_present_operatorId)) return false; if (!this.operatorId.equals(that.operatorId)) return false; } boolean this_present_operatorType = true && this.isSetOperatorType(); boolean that_present_operatorType = true && that.isSetOperatorType(); if (this_present_operatorType || that_present_operatorType) { if (!(this_present_operatorType && that_present_operatorType)) return false; if (!this.operatorType.equals(that.operatorType)) return false; } boolean this_present_operatorAttributes = true && this.isSetOperatorAttributes(); boolean that_present_operatorAttributes = true && that.isSetOperatorAttributes(); if (this_present_operatorAttributes || that_present_operatorAttributes) { if (!(this_present_operatorAttributes && that_present_operatorAttributes)) return false; if (!this.operatorAttributes.equals(that.operatorAttributes)) return false; } boolean this_present_operatorCounters = true && this.isSetOperatorCounters(); boolean that_present_operatorCounters = true && that.isSetOperatorCounters(); if (this_present_operatorCounters || that_present_operatorCounters) { if (!(this_present_operatorCounters && that_present_operatorCounters)) return false; if (!this.operatorCounters.equals(that.operatorCounters)) return false; } boolean this_present_done = true; boolean that_present_done = true; if (this_present_done || that_present_done) { if (!(this_present_done && that_present_done)) return false; if 
(this.done != that.done) return false; } boolean this_present_started = true; boolean that_present_started = true; if (this_present_started || that_present_started) { if (!(this_present_started && that_present_started)) return false; if (this.started != that.started) return false; } return true; } @Override public int hashCode() { int hashCode = 1; hashCode = hashCode * 8191 + ((isSetOperatorId()) ? 131071 : 524287); if (isSetOperatorId()) hashCode = hashCode * 8191 + operatorId.hashCode(); hashCode = hashCode * 8191 + ((isSetOperatorType()) ? 131071 : 524287); if (isSetOperatorType()) hashCode = hashCode * 8191 + operatorType.getValue(); hashCode = hashCode * 8191 + ((isSetOperatorAttributes()) ? 131071 : 524287); if (isSetOperatorAttributes()) hashCode = hashCode * 8191 + operatorAttributes.hashCode(); hashCode = hashCode * 8191 + ((isSetOperatorCounters()) ? 131071 : 524287); if (isSetOperatorCounters()) hashCode = hashCode * 8191 + operatorCounters.hashCode(); hashCode = hashCode * 8191 + ((done) ? 131071 : 524287); hashCode = hashCode * 8191 + ((started) ? 
131071 : 524287); return hashCode; } @Override public int compareTo(Operator other) { if (!getClass().equals(other.getClass())) { return getClass().getName().compareTo(other.getClass().getName()); } int lastComparison = 0; lastComparison = java.lang.Boolean.compare(isSetOperatorId(), other.isSetOperatorId()); if (lastComparison != 0) { return lastComparison; } if (isSetOperatorId()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.operatorId, other.operatorId); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetOperatorType(), other.isSetOperatorType()); if (lastComparison != 0) { return lastComparison; } if (isSetOperatorType()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.operatorType, other.operatorType); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetOperatorAttributes(), other.isSetOperatorAttributes()); if (lastComparison != 0) { return lastComparison; } if (isSetOperatorAttributes()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.operatorAttributes, other.operatorAttributes); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetOperatorCounters(), other.isSetOperatorCounters()); if (lastComparison != 0) { return lastComparison; } if (isSetOperatorCounters()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.operatorCounters, other.operatorCounters); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetDone(), other.isSetDone()); if (lastComparison != 0) { return lastComparison; } if (isSetDone()) { lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.done, other.done); if (lastComparison != 0) { return lastComparison; } } lastComparison = java.lang.Boolean.compare(isSetStarted(), other.isSetStarted()); if (lastComparison != 0) { return lastComparison; } if (isSetStarted()) { 
lastComparison = org.apache.thrift.TBaseHelper.compareTo(this.started, other.started); if (lastComparison != 0) { return lastComparison; } } return 0; } @org.apache.thrift.annotation.Nullable public _Fields fieldForId(int fieldId) { return _Fields.findByThriftId(fieldId); } public void read(org.apache.thrift.protocol.TProtocol iprot) throws org.apache.thrift.TException { scheme(iprot).read(iprot, this); } public void write(org.apache.thrift.protocol.TProtocol oprot) throws org.apache.thrift.TException { scheme(oprot).write(oprot, this); } @Override public java.lang.String toString() { java.lang.StringBuilder sb = new java.lang.StringBuilder("Operator("); boolean first = true; sb.append("operatorId:"); if (this.operatorId == null) { sb.append("null"); } else { sb.append(this.operatorId); } first = false; if (!first) sb.append(", "); sb.append("operatorType:"); if (this.operatorType == null) { sb.append("null"); } else { sb.append(this.operatorType); } first = false; if (!first) sb.append(", "); sb.append("operatorAttributes:"); if (this.operatorAttributes == null) { sb.append("null"); } else { sb.append(this.operatorAttributes); } first = false; if (!first) sb.append(", "); sb.append("operatorCounters:"); if (this.operatorCounters == null) { sb.append("null"); } else { sb.append(this.operatorCounters); } first = false; if (!first) sb.append(", "); sb.append("done:"); sb.append(this.done); first = false; if (!first) sb.append(", "); sb.append("started:"); sb.append(this.started); first = false; sb.append(")"); return sb.toString(); } public void validate() throws org.apache.thrift.TException { // check for required fields // check for sub-struct validity } private void writeObject(java.io.ObjectOutputStream out) throws java.io.IOException { try { write(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(out))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private void 
readObject(java.io.ObjectInputStream in) throws java.io.IOException, java.lang.ClassNotFoundException { try { // it doesn't seem like you should have to do this, but java serialization is wacky, and doesn't call the default constructor. __isset_bitfield = 0; read(new org.apache.thrift.protocol.TCompactProtocol(new org.apache.thrift.transport.TIOStreamTransport(in))); } catch (org.apache.thrift.TException te) { throw new java.io.IOException(te); } } private static class OperatorStandardSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public OperatorStandardScheme getScheme() { return new OperatorStandardScheme(); } } private static class OperatorStandardScheme extends org.apache.thrift.scheme.StandardScheme<Operator> { public void read(org.apache.thrift.protocol.TProtocol iprot, Operator struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TField schemeField; iprot.readStructBegin(); while (true) { schemeField = iprot.readFieldBegin(); if (schemeField.type == org.apache.thrift.protocol.TType.STOP) { break; } switch (schemeField.id) { case 1: // OPERATOR_ID if (schemeField.type == org.apache.thrift.protocol.TType.STRING) { struct.operatorId = iprot.readString(); struct.setOperatorIdIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 2: // OPERATOR_TYPE if (schemeField.type == org.apache.thrift.protocol.TType.I32) { struct.operatorType = org.apache.hadoop.hive.ql.plan.api.OperatorType.findByValue(iprot.readI32()); struct.setOperatorTypeIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 3: // OPERATOR_ATTRIBUTES if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { org.apache.thrift.protocol.TMap _map24 = iprot.readMapBegin(); struct.operatorAttributes = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map24.size); @org.apache.thrift.annotation.Nullable java.lang.String _key25; 
@org.apache.thrift.annotation.Nullable java.lang.String _val26; for (int _i27 = 0; _i27 < _map24.size; ++_i27) { _key25 = iprot.readString(); _val26 = iprot.readString(); struct.operatorAttributes.put(_key25, _val26); } iprot.readMapEnd(); } struct.setOperatorAttributesIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 4: // OPERATOR_COUNTERS if (schemeField.type == org.apache.thrift.protocol.TType.MAP) { { org.apache.thrift.protocol.TMap _map28 = iprot.readMapBegin(); struct.operatorCounters = new java.util.HashMap<java.lang.String,java.lang.Long>(2*_map28.size); @org.apache.thrift.annotation.Nullable java.lang.String _key29; long _val30; for (int _i31 = 0; _i31 < _map28.size; ++_i31) { _key29 = iprot.readString(); _val30 = iprot.readI64(); struct.operatorCounters.put(_key29, _val30); } iprot.readMapEnd(); } struct.setOperatorCountersIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 5: // DONE if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) { struct.done = iprot.readBool(); struct.setDoneIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; case 6: // STARTED if (schemeField.type == org.apache.thrift.protocol.TType.BOOL) { struct.started = iprot.readBool(); struct.setStartedIsSet(true); } else { org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } break; default: org.apache.thrift.protocol.TProtocolUtil.skip(iprot, schemeField.type); } iprot.readFieldEnd(); } iprot.readStructEnd(); struct.validate(); } public void write(org.apache.thrift.protocol.TProtocol oprot, Operator struct) throws org.apache.thrift.TException { struct.validate(); oprot.writeStructBegin(STRUCT_DESC); if (struct.operatorId != null) { oprot.writeFieldBegin(OPERATOR_ID_FIELD_DESC); oprot.writeString(struct.operatorId); oprot.writeFieldEnd(); } if (struct.operatorType != null) { 
oprot.writeFieldBegin(OPERATOR_TYPE_FIELD_DESC); oprot.writeI32(struct.operatorType.getValue()); oprot.writeFieldEnd(); } if (struct.operatorAttributes != null) { oprot.writeFieldBegin(OPERATOR_ATTRIBUTES_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING, struct.operatorAttributes.size())); for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter32 : struct.operatorAttributes.entrySet()) { oprot.writeString(_iter32.getKey()); oprot.writeString(_iter32.getValue()); } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } if (struct.operatorCounters != null) { oprot.writeFieldBegin(OPERATOR_COUNTERS_FIELD_DESC); { oprot.writeMapBegin(new org.apache.thrift.protocol.TMap(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.I64, struct.operatorCounters.size())); for (java.util.Map.Entry<java.lang.String, java.lang.Long> _iter33 : struct.operatorCounters.entrySet()) { oprot.writeString(_iter33.getKey()); oprot.writeI64(_iter33.getValue()); } oprot.writeMapEnd(); } oprot.writeFieldEnd(); } oprot.writeFieldBegin(DONE_FIELD_DESC); oprot.writeBool(struct.done); oprot.writeFieldEnd(); oprot.writeFieldBegin(STARTED_FIELD_DESC); oprot.writeBool(struct.started); oprot.writeFieldEnd(); oprot.writeFieldStop(); oprot.writeStructEnd(); } } private static class OperatorTupleSchemeFactory implements org.apache.thrift.scheme.SchemeFactory { public OperatorTupleScheme getScheme() { return new OperatorTupleScheme(); } } private static class OperatorTupleScheme extends org.apache.thrift.scheme.TupleScheme<Operator> { @Override public void write(org.apache.thrift.protocol.TProtocol prot, Operator struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol oprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet optionals = new java.util.BitSet(); if (struct.isSetOperatorId()) { optionals.set(0); } if 
(struct.isSetOperatorType()) { optionals.set(1); } if (struct.isSetOperatorAttributes()) { optionals.set(2); } if (struct.isSetOperatorCounters()) { optionals.set(3); } if (struct.isSetDone()) { optionals.set(4); } if (struct.isSetStarted()) { optionals.set(5); } oprot.writeBitSet(optionals, 6); if (struct.isSetOperatorId()) { oprot.writeString(struct.operatorId); } if (struct.isSetOperatorType()) { oprot.writeI32(struct.operatorType.getValue()); } if (struct.isSetOperatorAttributes()) { { oprot.writeI32(struct.operatorAttributes.size()); for (java.util.Map.Entry<java.lang.String, java.lang.String> _iter34 : struct.operatorAttributes.entrySet()) { oprot.writeString(_iter34.getKey()); oprot.writeString(_iter34.getValue()); } } } if (struct.isSetOperatorCounters()) { { oprot.writeI32(struct.operatorCounters.size()); for (java.util.Map.Entry<java.lang.String, java.lang.Long> _iter35 : struct.operatorCounters.entrySet()) { oprot.writeString(_iter35.getKey()); oprot.writeI64(_iter35.getValue()); } } } if (struct.isSetDone()) { oprot.writeBool(struct.done); } if (struct.isSetStarted()) { oprot.writeBool(struct.started); } } @Override public void read(org.apache.thrift.protocol.TProtocol prot, Operator struct) throws org.apache.thrift.TException { org.apache.thrift.protocol.TTupleProtocol iprot = (org.apache.thrift.protocol.TTupleProtocol) prot; java.util.BitSet incoming = iprot.readBitSet(6); if (incoming.get(0)) { struct.operatorId = iprot.readString(); struct.setOperatorIdIsSet(true); } if (incoming.get(1)) { struct.operatorType = org.apache.hadoop.hive.ql.plan.api.OperatorType.findByValue(iprot.readI32()); struct.setOperatorTypeIsSet(true); } if (incoming.get(2)) { { org.apache.thrift.protocol.TMap _map36 = iprot.readMapBegin(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.STRING); struct.operatorAttributes = new java.util.HashMap<java.lang.String,java.lang.String>(2*_map36.size); @org.apache.thrift.annotation.Nullable java.lang.String 
_key37; @org.apache.thrift.annotation.Nullable java.lang.String _val38; for (int _i39 = 0; _i39 < _map36.size; ++_i39) { _key37 = iprot.readString(); _val38 = iprot.readString(); struct.operatorAttributes.put(_key37, _val38); } } struct.setOperatorAttributesIsSet(true); } if (incoming.get(3)) { { org.apache.thrift.protocol.TMap _map40 = iprot.readMapBegin(org.apache.thrift.protocol.TType.STRING, org.apache.thrift.protocol.TType.I64); struct.operatorCounters = new java.util.HashMap<java.lang.String,java.lang.Long>(2*_map40.size); @org.apache.thrift.annotation.Nullable java.lang.String _key41; long _val42; for (int _i43 = 0; _i43 < _map40.size; ++_i43) { _key41 = iprot.readString(); _val42 = iprot.readI64(); struct.operatorCounters.put(_key41, _val42); } } struct.setOperatorCountersIsSet(true); } if (incoming.get(4)) { struct.done = iprot.readBool(); struct.setDoneIsSet(true); } if (incoming.get(5)) { struct.started = iprot.readBool(); struct.setStartedIsSet(true); } } } private static <S extends org.apache.thrift.scheme.IScheme> S scheme(org.apache.thrift.protocol.TProtocol proto) { return (org.apache.thrift.scheme.StandardScheme.class.equals(proto.getScheme()) ? STANDARD_SCHEME_FACTORY : TUPLE_SCHEME_FACTORY).getScheme(); } }
apache/druid
36,138
server/src/main/java/org/apache/druid/segment/metadata/CoordinatorSegmentMetadataCache.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.druid.segment.metadata; import com.google.common.annotations.VisibleForTesting; import com.google.common.base.Supplier; import com.google.common.collect.FluentIterable; import com.google.common.collect.ImmutableMap; import com.google.common.collect.Iterables; import com.google.common.collect.Sets; import com.google.inject.Inject; import org.apache.druid.client.CoordinatorServerView; import org.apache.druid.client.DataSourcesSnapshot; import org.apache.druid.client.ImmutableDruidDataSource; import org.apache.druid.client.InternalQueryConfig; import org.apache.druid.client.ServerView; import org.apache.druid.client.TimelineServerView; import org.apache.druid.guice.ManageLifecycle; import org.apache.druid.java.util.common.ISE; import org.apache.druid.java.util.common.Stopwatch; import org.apache.druid.java.util.common.StringUtils; import org.apache.druid.java.util.common.concurrent.Execs; import org.apache.druid.java.util.common.lifecycle.LifecycleStart; import org.apache.druid.java.util.common.lifecycle.LifecycleStop; import org.apache.druid.java.util.emitter.EmittingLogger; import org.apache.druid.java.util.emitter.service.ServiceEmitter; import 
org.apache.druid.java.util.emitter.service.ServiceMetricEvent; import org.apache.druid.metadata.SegmentsMetadataManager; import org.apache.druid.metadata.SegmentsMetadataManagerConfig; import org.apache.druid.query.DruidMetrics; import org.apache.druid.query.aggregation.AggregatorFactory; import org.apache.druid.query.metadata.metadata.SegmentAnalysis; import org.apache.druid.segment.SchemaPayload; import org.apache.druid.segment.SchemaPayloadPlus; import org.apache.druid.segment.column.ColumnType; import org.apache.druid.segment.column.RowSignature; import org.apache.druid.segment.realtime.appenderator.SegmentSchemas; import org.apache.druid.server.QueryLifecycleFactory; import org.apache.druid.server.coordination.DruidServerMetadata; import org.apache.druid.server.coordinator.loading.SegmentReplicaCount; import org.apache.druid.server.coordinator.loading.SegmentReplicationStatus; import org.apache.druid.server.security.Escalator; import org.apache.druid.timeline.DataSegment; import org.apache.druid.timeline.SegmentId; import javax.annotation.Nullable; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Iterator; import java.util.LinkedHashMap; import java.util.List; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.concurrent.ConcurrentHashMap; import java.util.concurrent.ConcurrentSkipListMap; import java.util.concurrent.Future; import java.util.concurrent.ScheduledExecutorService; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicBoolean; /** * Coordinator-side cache of segment metadata that combines segments to build * datasources. The cache provides metadata about a datasource, see {@link DataSourceInformation}. 
* <p> * Major differences from the other implementation {@code BrokerSegmentMetadataCache} are, * <li>The refresh is executed only on the leader Coordinator node.</li> * <li>Realtime segment schema refresh. Schema update for realtime segment is pushed periodically. * The schema is merged with any existing schema for the segment and the cache is updated. * Corresponding datasource is marked for refresh.</li> * <li>The refresh mechanism is significantly different from the other implementation, * <ul><li>Metadata query is executed only for those non-realtime segments for which the schema is not cached.</li> * <li>Datasources marked for refresh are then rebuilt.</li></ul> * </li> * <p> * It is important to note that the datasource schema returned in {@link #getDatasource} & {@link #getDataSourceInformationMap()} * also includes columns from cold segments. * Cold segments are processed in a separate thread and datasource schema from cold segments is separately stored. * </p> */ @ManageLifecycle public class CoordinatorSegmentMetadataCache extends AbstractSegmentMetadataCache<DataSourceInformation> { private static final EmittingLogger log = new EmittingLogger(CoordinatorSegmentMetadataCache.class); private static final Long COLD_SCHEMA_PERIOD_MULTIPLIER = 3L; private static final Long COLD_SCHEMA_SLOWNESS_THRESHOLD_MILLIS = TimeUnit.SECONDS.toMillis(50); private final SegmentMetadataCacheConfig config; private final ColumnTypeMergePolicy columnTypeMergePolicy; private final SegmentSchemaCache segmentSchemaCache; private final SegmentSchemaBackFillQueue segmentSchemaBackfillQueue; private final SegmentsMetadataManager segmentsMetadataManager; private final Supplier<SegmentsMetadataManagerConfig> segmentsMetadataManagerConfigSupplier; private volatile SegmentReplicationStatus segmentReplicationStatus = null; /** * Datasource schema built from cold segments only. {@link #tables} contains * schema built from hot segments, i.e. segments present on historicals. 
* The overall schema of a datasource is obtained by merging the two schemas. */ private final ConcurrentHashMap<String, DataSourceInformation> coldSchemaTable = new ConcurrentHashMap<>(); // Period for cold schema processing thread. This is a multiple of segment polling period. // Cold schema processing runs slower than the segment poll to save processing cost of all segments. // The downside is a delay in columns from cold segment reflecting in the datasource schema. private final ScheduledExecutorService coldSchemaExec; private @Nullable Future<?> cacheExecFuture = null; private @Nullable Future<?> coldSchemaExecFuture = null; @Inject public CoordinatorSegmentMetadataCache( QueryLifecycleFactory queryLifecycleFactory, CoordinatorServerView serverView, SegmentMetadataCacheConfig config, Escalator escalator, InternalQueryConfig internalQueryConfig, ServiceEmitter emitter, SegmentSchemaCache segmentSchemaCache, SegmentSchemaBackFillQueue segmentSchemaBackfillQueue, SegmentsMetadataManager segmentsMetadataManager, Supplier<SegmentsMetadataManagerConfig> segmentsMetadataManagerConfigSupplier ) { super(queryLifecycleFactory, config, escalator, internalQueryConfig, emitter); this.config = config; this.columnTypeMergePolicy = config.getMetadataColumnTypeMergePolicy(); this.segmentSchemaCache = segmentSchemaCache; this.segmentSchemaBackfillQueue = segmentSchemaBackfillQueue; this.segmentsMetadataManager = segmentsMetadataManager; this.segmentsMetadataManagerConfigSupplier = segmentsMetadataManagerConfigSupplier; this.coldSchemaExec = Execs.scheduledSingleThreaded("DruidColdSchema-ScheduledExecutor-%d"); initServerViewTimelineCallback(serverView); } private long getColdSchemaExecPeriodMillis() { return (segmentsMetadataManagerConfigSupplier.get().getPollDuration().toStandardDuration().getMillis()) * COLD_SCHEMA_PERIOD_MULTIPLIER; } private void initServerViewTimelineCallback(final CoordinatorServerView serverView) { serverView.registerTimelineCallback( callbackExec, new 
TimelineServerView.TimelineCallback() { @Override public ServerView.CallbackAction timelineInitialized() { synchronized (lock) { isServerViewInitialized = true; lock.notifyAll(); } return ServerView.CallbackAction.CONTINUE; } @Override public ServerView.CallbackAction segmentAdded(final DruidServerMetadata server, final DataSegment segment) { addSegment(server, segment); return ServerView.CallbackAction.CONTINUE; } @Override public ServerView.CallbackAction segmentRemoved(final DataSegment segment) { removeSegment(segment); return ServerView.CallbackAction.CONTINUE; } @Override public ServerView.CallbackAction serverSegmentRemoved( final DruidServerMetadata server, final DataSegment segment ) { removeServerSegment(server, segment); return ServerView.CallbackAction.CONTINUE; } @Override public ServerView.CallbackAction segmentSchemasAnnounced(SegmentSchemas segmentSchemas) { updateSchemaForRealtimeSegments(segmentSchemas); return ServerView.CallbackAction.CONTINUE; } } ); } @LifecycleStart @Override public void start() { // noop, refresh is started only on leader node } @LifecycleStop @Override public void stop() { callbackExec.shutdownNow(); cacheExec.shutdownNow(); coldSchemaExec.shutdownNow(); segmentSchemaCache.onLeaderStop(); segmentSchemaBackfillQueue.onLeaderStop(); if (cacheExecFuture != null) { cacheExecFuture.cancel(true); } if (coldSchemaExecFuture != null) { coldSchemaExecFuture.cancel(true); } } public void onLeaderStart() { log.info("Initializing cache on leader node."); try { segmentSchemaBackfillQueue.onLeaderStart(); cacheExecFuture = cacheExec.submit(this::cacheExecLoop); coldSchemaExecFuture = coldSchemaExec.scheduleWithFixedDelay( this::refreshColdSegmentSchemas, getColdSchemaExecPeriodMillis(), getColdSchemaExecPeriodMillis(), TimeUnit.MILLISECONDS ); if (config.isAwaitInitializationOnStart()) { awaitInitialization(); } } catch (Exception e) { throw new RuntimeException(e); } } public void onLeaderStop() { log.info("No longer leader, stopping 
cache."); if (cacheExecFuture != null) { cacheExecFuture.cancel(true); } if (coldSchemaExecFuture != null) { coldSchemaExecFuture.cancel(true); } segmentSchemaCache.onLeaderStop(); segmentSchemaBackfillQueue.onLeaderStop(); } /** * This method ensures that the refresh goes through only when schemaCache is initialized. */ @Override public synchronized void refreshWaitCondition() throws InterruptedException { segmentSchemaCache.awaitInitialization(); } public void updateSegmentReplicationStatus(SegmentReplicationStatus segmentReplicationStatus) { this.segmentReplicationStatus = segmentReplicationStatus; } @Override protected void unmarkSegmentAsMutable(SegmentId segmentId) { synchronized (lock) { log.debug("SegmentId [%s] is marked as finalized.", segmentId); mutableSegments.remove(segmentId); // remove it from the realtime schema cache segmentSchemaCache.realtimeSegmentRemoved(segmentId); } } @Override protected void removeSegmentAction(SegmentId segmentId) { log.debug("SegmentId [%s] is removed.", segmentId); segmentSchemaCache.segmentRemoved(segmentId); } @Override protected boolean fetchAggregatorsInSegmentMetadataQuery() { return true; } @Override protected boolean updateSegmentMetadata( SegmentId segmentId, SegmentAnalysis analysis ) { final RowSignature rowSignature = analysisToRowSignature(analysis); log.debug("Segment[%s] has signature[%s].", segmentId, rowSignature); AtomicBoolean added = new AtomicBoolean(false); segmentMetadataInfo.compute( segmentId.getDataSource(), (datasourceKey, dataSourceSegments) -> { if (dataSourceSegments == null) { // Datasource may have been removed or become unavailable while this refresh was ongoing. 
log.warn( "No segment map found with datasource [%s], skipping refresh of segment [%s]", datasourceKey, segmentId ); return null; } else { dataSourceSegments.compute( segmentId, (segmentIdKey, segmentMetadata) -> { if (segmentMetadata == null) { log.warn("No segment [%s] found, skipping refresh", segmentId); return null; } else { long numRows = analysis.getNumRows(); log.debug("Publishing segment schema. SegmentId [%s], RowSignature [%s], numRows [%d]", segmentId, rowSignature, numRows); Map<String, AggregatorFactory> aggregators = analysis.getAggregators(); // Add the schema to the cache and queue it for backfill to DB final SchemaPayloadPlus schema = new SchemaPayloadPlus( new SchemaPayload(rowSignature, aggregators), numRows ); segmentSchemaCache.addSchemaPendingBackfill(segmentId, schema); segmentSchemaBackfillQueue.add(segmentId, schema); added.set(true); return segmentMetadata; } } ); if (dataSourceSegments.isEmpty()) { return null; } else { return dataSourceSegments; } } } ); return added.get(); } @Override public Iterator<AvailableSegmentMetadata> iterateSegmentMetadata() { return FluentIterable .from(segmentMetadataInfo.values()) .transformAndConcat(Map::values) .transform( availableSegmentMetadata -> { final SegmentId segmentId = availableSegmentMetadata.getSegment().getId(); final Optional<SchemaPayloadPlus> metadata = segmentSchemaCache.getSchemaForSegment(segmentId); if (metadata.isPresent()) { return AvailableSegmentMetadata.from(availableSegmentMetadata) .withRowSignature(metadata.get().getSchemaPayload().getRowSignature()) .withNumRows(metadata.get().getNumRows()) .build(); } else { markSegmentForRefreshIfNeeded(availableSegmentMetadata.getSegment()); return availableSegmentMetadata; } } ) .iterator(); } @Nullable @Override public AvailableSegmentMetadata getAvailableSegmentMetadata(String datasource, SegmentId segmentId) { ConcurrentSkipListMap<SegmentId, AvailableSegmentMetadata> segmentMap = segmentMetadataInfo.get(datasource); 
AvailableSegmentMetadata availableSegmentMetadata = null; if (segmentMap != null) { availableSegmentMetadata = segmentMap.get(segmentId); } if (availableSegmentMetadata == null) { return null; } Optional<SchemaPayloadPlus> metadata = segmentSchemaCache.getSchemaForSegment(segmentId); if (metadata.isPresent()) { availableSegmentMetadata = AvailableSegmentMetadata.from(availableSegmentMetadata) .withRowSignature(metadata.get().getSchemaPayload().getRowSignature()) .withNumRows(metadata.get().getNumRows()) .build(); } else { markSegmentForRefreshIfNeeded(availableSegmentMetadata.getSegment()); } return availableSegmentMetadata; } @Override public DataSourceInformation getDatasource(String name) { return getMergedDatasourceInformation(tables.get(name), coldSchemaTable.get(name)).orElse(null); } @Override public Map<String, DataSourceInformation> getDataSourceInformationMap() { Map<String, DataSourceInformation> hot = new HashMap<>(tables); Map<String, DataSourceInformation> cold = new HashMap<>(coldSchemaTable); Set<String> combinedDatasources = new HashSet<>(hot.keySet()); combinedDatasources.addAll(cold.keySet()); ImmutableMap.Builder<String, DataSourceInformation> combined = ImmutableMap.builder(); for (String dataSource : combinedDatasources) { getMergedDatasourceInformation(hot.get(dataSource), cold.get(dataSource)) .ifPresent(merged -> combined.put( dataSource, merged )); } return combined.build(); } private Optional<DataSourceInformation> getMergedDatasourceInformation( final DataSourceInformation hot, final DataSourceInformation cold ) { if (hot == null) { return Optional.ofNullable(cold); } else if (cold == null) { return Optional.of(hot); } else { final Map<String, ColumnType> columnTypes = new LinkedHashMap<>(); List<RowSignature> signatures = new ArrayList<>(); // hot datasource schema takes precedence signatures.add(hot.getRowSignature()); signatures.add(cold.getRowSignature()); for (RowSignature signature : signatures) { extractColumnTypes(columnTypes, 
signature); } final RowSignature.Builder builder = RowSignature.builder(); columnTypes.forEach(builder::add); return Optional.of(new DataSourceInformation(hot.getDataSource(), builder.build())); } } /** * Executes SegmentMetadataQuery to fetch schema information for each segment in the refresh list. * The schema information for individual segments is combined to construct a table schema, which is then cached. * * @param segmentsToRefresh segments for which the schema might have changed * @param dataSourcesToRebuild datasources for which the schema might have changed * @throws IOException when querying segment from data nodes and tasks */ @Override public void refresh(final Set<SegmentId> segmentsToRefresh, final Set<String> dataSourcesToRebuild) throws IOException { log.debug("Segments to refresh [%s], dataSourcesToRebuild [%s]", segmentsToRefresh, dataSourcesToRebuild); filterRealtimeSegments(segmentsToRefresh); log.debug("SegmentsToRefreshMinusRealtimeSegments [%s]", segmentsToRefresh); final Set<SegmentId> cachedSegments = filterSegmentWithCachedSchema(segmentsToRefresh); log.debug("SegmentsToRefreshMinusCachedSegments [%s], cachedSegments [%s]", segmentsToRefresh, cachedSegments); // Refresh the segments. Set<SegmentId> refreshed = Collections.emptySet(); if (!config.isDisableSegmentMetadataQueries()) { refreshed = refreshSegments(segmentsToRefresh); log.debug("Refreshed segments are [%s]", refreshed); } synchronized (lock) { // Add missing segments back to the refresh list. segmentsNeedingRefresh.addAll(Sets.difference(segmentsToRefresh, refreshed)); // Compute the list of datasources to rebuild tables for. 
dataSourcesToRebuild.addAll(dataSourcesNeedingRebuild); refreshed.forEach(segment -> dataSourcesToRebuild.add(segment.getDataSource())); cachedSegments.forEach(segment -> dataSourcesToRebuild.add(segment.getDataSource())); dataSourcesNeedingRebuild.clear(); } log.debug("Re-building schema for datasources[%s].", dataSourcesToRebuild); // Rebuild the datasources. for (String dataSource : dataSourcesToRebuild) { final RowSignature rowSignature = buildDataSourceRowSignature(dataSource); if (rowSignature == null) { log.info( "Datasource[%s] no longer exists as its row signature is [null]." + " Removing all cached metadata.", dataSource ); tables.remove(dataSource); continue; } DataSourceInformation druidTable = new DataSourceInformation(dataSource, rowSignature); final DataSourceInformation oldTable = tables.put(dataSource, druidTable); if (oldTable == null || !oldTable.getRowSignature().equals(druidTable.getRowSignature())) { log.info("Datasource[%s] has a new row signature[%s].", dataSource, druidTable.getRowSignature()); } else { log.debug("Datasource[%s] row signature is unchanged.", dataSource); } } } @Override void logSegmentsToRefresh(String dataSource, Set<SegmentId> ids) { log.info( "Refreshing schema of [%d] segment IDs (sample=[%s]) for datasource[%s].", ids.size(), Iterables.limit(ids, 5), dataSource ); } private void filterRealtimeSegments(Set<SegmentId> segmentIds) { synchronized (lock) { segmentIds.removeAll(mutableSegments); } } private Set<SegmentId> filterSegmentWithCachedSchema(Set<SegmentId> segmentIds) { Set<SegmentId> cachedSegments = new HashSet<>(); for (SegmentId id : segmentIds) { if (segmentSchemaCache.isSchemaCached(id)) { cachedSegments.add(id); } } segmentIds.removeAll(cachedSegments); return cachedSegments; } @Nullable private Integer getReplicationFactor(SegmentId segmentId) { if (segmentReplicationStatus == null) { return null; } SegmentReplicaCount replicaCountsInCluster = segmentReplicationStatus.getReplicaCountsInCluster(segmentId); 
return replicaCountsInCluster == null ? null : replicaCountsInCluster.required(); } /** * Recomputes the cold schema of all datasources and updates in {@link #coldSchemaTable}. * The cold schema row signature is obtained by merging the column types from * the schemas of all cold segments (used segments with zero replication). */ @VisibleForTesting protected void refreshColdSegmentSchemas() { final Stopwatch stopwatch = Stopwatch.createStarted(); // Find cold segments for all datasources int totalColdSegments = 0; final Set<String> dataSourcesWithColdSegments = new HashSet<>(); final DataSourcesSnapshot snapshot = segmentsMetadataManager.getRecentDataSourcesSnapshot(); for (ImmutableDruidDataSource dataSource : snapshot.getDataSourcesWithAllUsedSegments()) { final Map<String, ColumnType> columnTypes = new LinkedHashMap<>(); // Identify cold segments for this datasource int coldSegments = 0; int coldSegmentsWithSchema = 0; for (DataSegment segment : dataSource.getSegments()) { Integer replicationFactor = getReplicationFactor(segment.getId()); if (replicationFactor != null && replicationFactor != 0) { // This is not a cold segment continue; } Optional<SchemaPayloadPlus> optionalSchema = segmentSchemaCache.getSchemaForSegment(segment.getId()); if (optionalSchema.isPresent()) { RowSignature rowSignature = optionalSchema.get().getSchemaPayload().getRowSignature(); extractColumnTypes(columnTypes, rowSignature); coldSegmentsWithSchema++; } coldSegments++; } if (coldSegments == 0) { // this datasource doesn't have any cold segment continue; } totalColdSegments += coldSegments; final String dataSourceName = dataSource.getName(); dataSourcesWithColdSegments.add(dataSourceName); final ServiceMetricEvent.Builder metricBuilder = new ServiceMetricEvent.Builder().setDimension(DruidMetrics.DATASOURCE, dataSourceName); emitMetric(Metric.USED_COLD_SEGMENTS, coldSegments, metricBuilder); if (columnTypes.isEmpty()) { // this datasource doesn't have schema for cold segments continue; } 
// Build a row signature for cold segments of this datasource final RowSignature.Builder builder = RowSignature.builder(); columnTypes.forEach(builder::add); final RowSignature coldSignature = builder.build(); DataSourceInformation druidTable = new DataSourceInformation(dataSourceName, coldSignature); DataSourceInformation oldTable = coldSchemaTable.put(dataSourceName, druidTable); if (oldTable == null || !oldTable.getRowSignature().equals(druidTable.getRowSignature())) { log.info("Datasource[%s] has new cold row signature[%s].", dataSourceName, druidTable.getRowSignature()); } else { log.debug("Row signature for datasource[%s] is unchanged.", dataSourceName); } emitMetric(Metric.COLD_SEGMENT_SCHEMAS, coldSegmentsWithSchema, metricBuilder); log.debug("Built row signature[%s] from cold segments for datasource[%s].", coldSignature, dataSourceName); } // Remove any stale datasource from the map coldSchemaTable.keySet().retainAll(dataSourcesWithColdSegments); emitMetric( Metric.COLD_SCHEMA_REFRESH_DURATION_MILLIS, stopwatch.millisElapsed() ); int numDatasources = snapshot.getDataSourcesMap().size(); String executionStatsLog = StringUtils.format( "Cold schema processing took [%d] millis. " + "Processed total [%d] datasources, [%d] segments. Found [%d] datasources with cold segment schema.", stopwatch.millisElapsed(), numDatasources, totalColdSegments, dataSourcesWithColdSegments.size() ); if (stopwatch.millisElapsed() > COLD_SCHEMA_SLOWNESS_THRESHOLD_MILLIS) { log.info(executionStatsLog); } else { log.debug(executionStatsLog); } } /** * Extracts column types from the given row signature. 
*/ private void extractColumnTypes(final Map<String, ColumnType> columnTypes, final RowSignature signature) { for (String column : signature.getColumnNames()) { final ColumnType columnType = signature.getColumnType(column) .orElseThrow(() -> new ISE("Encountered null type for column[%s]", column)); columnTypes.compute(column, (c, existingType) -> columnTypeMergePolicy.merge(existingType, columnType)); } } @VisibleForTesting @Nullable @Override public RowSignature buildDataSourceRowSignature(final String dataSource) { ConcurrentSkipListMap<SegmentId, AvailableSegmentMetadata> segmentsMap = segmentMetadataInfo.get(dataSource); // Preserve order. final Map<String, ColumnType> columnTypes = new LinkedHashMap<>(); if (segmentsMap != null && !segmentsMap.isEmpty()) { for (Map.Entry<SegmentId, AvailableSegmentMetadata> entry : segmentsMap.entrySet()) { SegmentId segmentId = entry.getKey(); Optional<SchemaPayloadPlus> optionalSchema = segmentSchemaCache.getSchemaForSegment(segmentId); if (optionalSchema.isPresent()) { RowSignature rowSignature = optionalSchema.get().getSchemaPayload().getRowSignature(); extractColumnTypes(columnTypes, rowSignature); } else { markSegmentForRefreshIfNeeded(entry.getValue().getSegment()); } } } else { // table has no segments return null; } final RowSignature.Builder builder = RowSignature.builder(); columnTypes.forEach(builder::add); return builder.build(); } /** * Update schema for segments. */ @VisibleForTesting void updateSchemaForRealtimeSegments(SegmentSchemas segmentSchemas) { log.debug("SchemaUpdate for realtime segments [%s].", segmentSchemas); List<SegmentSchemas.SegmentSchema> segmentSchemaList = segmentSchemas.getSegmentSchemaList(); for (SegmentSchemas.SegmentSchema segmentSchema : segmentSchemaList) { String dataSource = segmentSchema.getDataSource(); SegmentId segmentId = SegmentId.tryParse(dataSource, segmentSchema.getSegmentId()); if (segmentId == null) { log.error("Could not apply schema update. 
Failed parsing segmentId [%s]", segmentSchema.getSegmentId()); continue; } log.debug("Applying schema update for segmentId [%s] datasource [%s]", segmentId, dataSource); segmentMetadataInfo.compute( dataSource, (dataSourceKey, segmentsMap) -> { if (segmentsMap == null) { // Datasource may have been removed or become unavailable while this refresh was ongoing. log.warn( "No segment map found with datasource [%s], skipping refresh of segment [%s]", dataSourceKey, segmentId ); return null; } else { segmentsMap.compute( segmentId, (id, segmentMetadata) -> { if (segmentMetadata == null) { // By design, this case shouldn't arise since both segment and schema is announced in the same flow // and messages shouldn't be lost in the poll // also segment announcement should always precede schema announcement // and there shouldn't be any schema updates for removed segments log.makeAlert("Schema update [%s] for unknown segment [%s]", segmentSchema, segmentId).emit(); } else { // We know this segment. Optional<SchemaPayloadPlus> schemaMetadata = segmentSchemaCache.getSchemaForSegment(segmentId); Optional<RowSignature> rowSignature = mergeOrCreateRowSignature( segmentId, schemaMetadata.map( segmentSchemaMetadata -> segmentSchemaMetadata.getSchemaPayload().getRowSignature()) .orElse(null), segmentSchema ); if (rowSignature.isPresent()) { log.debug( "Segment [%s] signature [%s] after applying schema update.", segmentId, rowSignature.get() ); final Long numRows = segmentSchema.getNumRows() == null ? null : segmentSchema.getNumRows().longValue(); segmentSchemaCache.addRealtimeSegmentSchema( segmentId, new SchemaPayloadPlus(new SchemaPayload(rowSignature.get()), numRows) ); // mark the datasource for rebuilding markDataSourceAsNeedRebuild(dataSource); } } return segmentMetadata; } ); return segmentsMap; } } ); } } /** * Merge or create a new RowSignature using the existing RowSignature and schema update. 
*/ @VisibleForTesting Optional<RowSignature> mergeOrCreateRowSignature( SegmentId segmentId, @Nullable RowSignature existingSignature, SegmentSchemas.SegmentSchema segmentSchema ) { if (!segmentSchema.isDelta()) { // absolute schema // override the existing signature // this case could arise when the server restarts or counter mismatch between client and server RowSignature.Builder builder = RowSignature.builder(); Map<String, ColumnType> columnMapping = segmentSchema.getColumnTypeMap(); for (String column : segmentSchema.getNewColumns()) { builder.add(column, columnMapping.get(column)); } return Optional.of(ROW_SIGNATURE_INTERNER.intern(builder.build())); } else if (existingSignature != null) { // delta update // merge with the existing signature RowSignature.Builder builder = RowSignature.builder(); final Map<String, ColumnType> mergedColumnTypes = new LinkedHashMap<>(); for (String column : existingSignature.getColumnNames()) { final ColumnType columnType = existingSignature.getColumnType(column) .orElseThrow(() -> new ISE("Encountered null type for column [%s]", column)); mergedColumnTypes.put(column, columnType); } final Map<String, ColumnType> columnMapping = segmentSchema.getColumnTypeMap(); // column type to be updated is not present in the existing schema final Set<String> missingUpdateColumns = new HashSet<>(); for (String column : segmentSchema.getUpdatedColumns()) { if (!mergedColumnTypes.containsKey(column)) { missingUpdateColumns.add(column); mergedColumnTypes.put(column, columnMapping.get(column)); } else { mergedColumnTypes.compute(column, (c, existingType) -> columnTypeMergePolicy.merge(existingType, columnMapping.get(column))); } } for (String column : segmentSchema.getNewColumns()) { if (mergedColumnTypes.containsKey(column)) { mergedColumnTypes.compute(column, (c, existingType) -> columnTypeMergePolicy.merge(existingType, columnMapping.get(column))); } else { mergedColumnTypes.put(column, columnMapping.get(column)); } } if 
(!missingUpdateColumns.isEmpty()) { log.makeAlert( "Datasource schema mismatch detected. The delta realtime segment schema contains columns " + "that are not defined in the datasource schema. " + "This indicates a potential issue with schema updates on the Coordinator. " + "Please review relevant Coordinator metrics and logs for task communication to identify any issues." ) .addData("datasource", segmentId.getDataSource()) .addData("existingSignature", existingSignature) .addData("deltaSchema", segmentSchema) .addData("missingUpdateColumns", missingUpdateColumns) .emit(); } mergedColumnTypes.forEach(builder::add); return Optional.of(ROW_SIGNATURE_INTERNER.intern(builder.build())); } else { // delta update // we don't have the previous signature, but we received delta update, raise alert // this case shouldn't arise by design // this can happen if a new segment is added and this is the very first schema update, // implying we lost the absolute schema update // which implies either the absolute schema update was never computed or lost in polling log.makeAlert("Received delta schema update [%s] for a segment [%s] with no previous schema. ", segmentSchema, segmentId ).emit(); return Optional.empty(); } } /** * A segment schema can go missing. To ensure smooth functioning, segment is marked for refresh. * It need not be refreshed in the following scenarios: * - Tombstone segment, since they do not have any schema. * - Unused segment which hasn't been yet removed from the cache. * Any other scenario needs investigation. 
*/ private void markSegmentForRefreshIfNeeded(DataSegment segment) { SegmentId id = segment.getId(); log.debug("SchemaMetadata for segmentId [%s] is absent.", id); if (segment.isTombstone()) { log.debug("Skipping refresh for tombstone segment [%s].", id); return; } ImmutableDruidDataSource druidDataSource = segmentsMetadataManager.getRecentDataSourcesSnapshot().getDataSource(segment.getDataSource()); if (druidDataSource != null && druidDataSource.getSegment(id) != null) { markSegmentAsNeedRefresh(id); } else { log.debug("Skipping refresh for unused segment [%s].", id); } } }
apache/sis
36,381
endorsed/src/org.apache.sis.feature/main/org/apache/sis/coverage/grid/GridCoverage2D.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.sis.coverage.grid; import java.util.List; import java.util.Arrays; import java.util.ArrayList; import java.util.Objects; import java.util.concurrent.atomic.AtomicReference; import java.text.NumberFormat; import java.text.FieldPosition; import java.io.IOException; import java.io.UncheckedIOException; import java.awt.Rectangle; import java.awt.image.BufferedImage; import java.awt.image.RenderedImage; import java.awt.image.SampleModel; import static java.lang.Math.min; import static java.lang.Math.addExact; import static java.lang.Math.subtractExact; import static java.lang.Math.toIntExact; import org.opengis.metadata.spatial.DimensionNameType; import org.opengis.util.NameFactory; import org.opengis.util.InternationalString; import org.opengis.util.FactoryException; import org.opengis.geometry.DirectPosition; import org.opengis.referencing.crs.CoordinateReferenceSystem; import org.opengis.referencing.operation.NoninvertibleTransformException; import org.opengis.referencing.operation.TransformException; import org.opengis.referencing.operation.MathTransform1D; import org.apache.sis.image.DataType; import org.apache.sis.coverage.SampleDimension; import 
org.apache.sis.image.internal.shared.ImageUtilities; import org.apache.sis.image.internal.shared.ReshapedImage; import org.apache.sis.feature.internal.Resources; import org.apache.sis.util.ArraysExt; import org.apache.sis.util.Debug; import org.apache.sis.util.iso.DefaultNameFactory; import org.apache.sis.util.collection.TableColumn; import org.apache.sis.util.collection.TreeTable; import org.apache.sis.util.resources.Vocabulary; import org.apache.sis.util.resources.Errors; // Specific to the main branch: import org.opengis.geometry.MismatchedDimensionException; import org.apache.sis.coverage.CannotEvaluateException; import org.apache.sis.coverage.PointOutsideCoverageException; /** * Basic access to grid data values backed by a two-dimensional {@link RenderedImage}. * While images are two-dimensional, the coverage <em>envelope</em> may have more dimensions. * In other words the rendered image can be a two-dimensional slice in a <var>n</var>-dimensional space. * The only restriction is that the {@linkplain GridGeometry#getExtent() grid extent} has a * {@linkplain GridExtent#getSize(int) size} equals to 1 in all dimensions except two of them. * * <h2>Example</h2> * A remote sensing image may be valid only over some time range * (the temporal period of the satellite passing over observed area). * Envelopes for such grid coverage can have three dimensions: * the two usual ones (horizontal extent along <var>x</var> and <var>y</var>), * and a third dimension for start time and end time (temporal extent along <var>t</var>). * This "two-dimensional" grid coverage can have any number of columns along <var>x</var> axis * and any number of rows along <var>y</var> axis, but only one plan along <var>t</var> axis. * This single plan can have a lower bound (the start time) and an upper bound (the end time). 
* * <h2>Image size and location</h2> * The {@linkplain RenderedImage#getWidth() image width} and {@linkplain RenderedImage#getHeight() height} * must be equal to the {@linkplain GridExtent#getSize(int) grid extent size} in the two dimensions of the slice. * However, the image origin ({@linkplain RenderedImage#getMinX() minimal x} and {@linkplain RenderedImage#getMinY() y} * values) does not need to be equal to the {@linkplain GridExtent#getLow(int) grid extent low values}; * a translation will be applied as needed. * * <h2>Image bands</h2> * Each band in an image is represented as a {@link SampleDimension}. * * @author Martin Desruisseaux (Geomatys) * @author Johann Sorel (Geomatys) * @author Alexis Manin (Geomatys) * @version 1.5 * @since 1.1 */ public class GridCoverage2D extends GridCoverage { /** * A constant for identifying code that relying on having 2 dimensions. * This is the minimal number of dimension required for this coverage. */ static final int BIDIMENSIONAL = 2; /** * The sample values stored as a {@code RenderedImage}. */ private final RenderedImage data; /** * Offsets to apply for converting grid coverage coordinates to image pixel coordinates. * This is {@link RenderedImage#getMinX()} − <code>{@linkplain GridExtent#getLow(int) * GridExtent.getLow}({@linkplain #xDimension})</code> for the <var>x</var> offset * and a similar formula for the <var>y</var> offset. */ private final long gridToImageX, gridToImageY; /** * Indices of extent dimensions corresponding to image <var>x</var> and <var>y</var> coordinates. * Typical values are 0 for {@code xDimension} and 1 for {@code yDimension}, but different values * are allowed. */ private final int xDimension, yDimension; /** * The two-dimensional components of the coordinate reference system and "grid to CRS" transform. * This is derived from {@link #gridGeometry} when first needed, retaining only the components at * dimension indices {@link #xDimension} and {@link #yDimension}. 
The same {@link AtomicReference} * instance may be shared with {@link #convertedView} and {@link #packedView}. * * @see #getGridGeometry2D() */ private final AtomicReference<GridGeometry> gridGeometry2D; /** * Creates a new grid coverage for the conversion of specified source coverage. * * @param source the coverage containing source values. * @param range the sample dimensions to assign to the converted grid coverage. * @param converters conversion from source to converted coverage, one transform per band. * @param isConverted whether this grid coverage is for converted or packed values. */ private GridCoverage2D(final GridCoverage2D source, final List<SampleDimension> range, final MathTransform1D[] converters, final boolean isConverted) { super(source.gridGeometry, range); final DataType bandType = ConvertedGridCoverage.getBandType(range, isConverted, source); data = convert(source.data, bandType, converters, Lazy.PROCESSOR); gridToImageX = source.gridToImageX; gridToImageY = source.gridToImageY; xDimension = source.xDimension; yDimension = source.yDimension; gridGeometry2D = source.gridGeometry2D; } /** * Creates a new grid coverage for the resampling of specified source coverage. * * @param source the coverage containing source values. * @param domain the grid extent, CRS and conversion from cell indices to CRS. * @param extent the {@code domain.getExtent()} value. * @param data the sample values as a {@link RenderedImage}, with one band for each sample dimension. 
*/ GridCoverage2D(final GridCoverage source, final GridGeometry domain, final GridExtent extent, RenderedImage data) { super(source, domain); final int[] imageAxes = extent.getSubspaceDimensions(BIDIMENSIONAL); xDimension = imageAxes[0]; yDimension = imageAxes[1]; this.data = data = unwrapIfSameSize(data); gridToImageX = subtractExact(data.getMinX(), extent.getLow(xDimension)); gridToImageY = subtractExact(data.getMinY(), extent.getLow(yDimension)); gridGeometry2D = new AtomicReference<>(); } /** * Constructs a grid coverage using the same domain and range than the given coverage, but different data. * This constructor can be used when new data have been computed by an image processing operation, * but each pixel of the result have the same coordinates and the same units of measurement * than in the source coverage. * * @param source the coverage from which to copy grid geometry and sample dimensions. * @param data the sample values as a {@link RenderedImage}, with one band for each sample dimension. * @throws IllegalGridGeometryException if the image size is not consistent with the grid geometry. * @throws IllegalArgumentException if the image number of bands is not the same as the number of sample dimensions. * * @since 1.2 */ @SuppressWarnings("this-escape") // The invoked method does not store `this` and is not overrideable. 
public GridCoverage2D(final GridCoverage source, RenderedImage data) { super(source, source.getGridGeometry()); this.data = data = unwrapIfSameSize(Objects.requireNonNull(data)); final GridExtent extent = gridGeometry.getExtent(); final int[] imageAxes; if (source instanceof GridCoverage2D) { final var gs = (GridCoverage2D) source; xDimension = gs.xDimension; yDimension = gs.yDimension; gridToImageX = gs.gridToImageX; gridToImageY = gs.gridToImageY; gridGeometry2D = gs.gridGeometry2D; imageAxes = new int[] {xDimension, yDimension}; } else { imageAxes = extent.getSubspaceDimensions(BIDIMENSIONAL); xDimension = imageAxes[0]; yDimension = imageAxes[1]; gridToImageX = subtractExact(data.getMinX(), extent.getLow(xDimension)); gridToImageY = subtractExact(data.getMinY(), extent.getLow(yDimension)); gridGeometry2D = new AtomicReference<>(); } verifyImageSize(extent, data, imageAxes); verifyBandCount(super.getSampleDimensions(), data); } /** * Constructs a grid coverage using the specified domain, range and data. If the given domain does not * have an extent, then a default {@link GridExtent} will be computed from given image. Otherwise the * {@linkplain RenderedImage#getWidth() image width} and {@linkplain RenderedImage#getHeight() height} * must be equal to the {@linkplain GridExtent#getSize(int) grid extent size} in the two dimensions of * the slice. * * <p>The image origin ({@linkplain RenderedImage#getMinX() minimal x} and {@linkplain RenderedImage#getMinY() y} * values) can be anywhere; it does not need to be the same as the {@linkplain GridExtent#getLow(int) grid extent * low values}. 
Translations will be applied automatically when needed.</p> * * <p>This constructor throws an {@link IllegalGridGeometryException} if one * of the following errors is detected in the {@code domain} argument:</p> * <ul> * <li>The given domain has less than two dimensions.</li> * <li>The given domain has more than two dimensions having an * {@linkplain GridExtent#getSize(int) extent size} greater than 1.</li> * <li>The extent size along <var>x</var> and <var>y</var> axes is not equal to the image width and height.</li> * </ul> * * @param domain the grid extent (may be absent), CRS and conversion from cell indices. * If {@code null} a default grid geometry will be created with no CRS and identity conversion. * @param range sample dimensions for each image band. The size of this list must be equal to the number of bands. * If {@code null}, default sample dimensions will be created with no transfer function. * @param data the sample values as a {@link RenderedImage}, with one band for each sample dimension. * @throws IllegalGridGeometryException if the {@code domain} does not met the above-documented conditions. * @throws IllegalArgumentException if the image number of bands is not the same as the number of sample dimensions. * @throws ArithmeticException if the distance between grid location and image location exceeds the {@code long} capacity. * * @see GridCoverageBuilder */ public GridCoverage2D(GridGeometry domain, final List<? extends SampleDimension> range, RenderedImage data) { /* * The complex nesting of method calls below is a workaround * while waiting for JEP 447: Statements before super(…). */ super(domain = addExtentIfAbsent(domain, data = unwrapIfSameSize(data)), defaultIfAbsent(range, data, ImageUtilities.getNumBands(data))); this.data = Objects.requireNonNull(data); /* * Find indices of the two dimensions of the slice. Those dimensions are usually 0 for x and 1 for y, * but not necessarily. 
A two dimensional CRS will be extracted for those dimensions later if needed. */ final GridExtent extent = domain.getExtent(); final int[] imageAxes; try { imageAxes = extent.getSubspaceDimensions(BIDIMENSIONAL); } catch (CannotEvaluateException e) { throw new IllegalGridGeometryException(e.getMessage(), e); } xDimension = imageAxes[0]; yDimension = imageAxes[1]; gridToImageX = subtractExact(data.getMinX(), extent.getLow(xDimension)); gridToImageY = subtractExact(data.getMinY(), extent.getLow(yDimension)); verifyImageSize(extent, data, imageAxes); verifyBandCount(range, data); gridGeometry2D = new AtomicReference<>(); } /** * Returns the wrapped image if the only difference is a translation, or {@code data} otherwise. */ private static RenderedImage unwrapIfSameSize(RenderedImage data) { if (data instanceof ReshapedImage) { final var source = ((ReshapedImage) data).source; if (source.getWidth() == data.getWidth() && source.getHeight() == data.getHeight()) { data = source; } } return data; } /** * If the given domain does not have a {@link GridExtent}, creates a new grid geometry * with an extent computed from the given image. The new grid will start at the same * location than the image and will have the same size. * * @param domain the domain to complete. May be {@code null}. * @param data user supplied image, or {@code null} if missing. * @return the potentially completed domain (may be {@code null}). */ static GridGeometry addExtentIfAbsent(GridGeometry domain, final RenderedImage data) { if (data != null) { domain = addExtentIfAbsent(domain, ImageUtilities.getBounds(data)); } return domain; } /** * If the given domain does not have a {@link GridExtent}, creates a new grid geometry * with an extent computed from the given image bounds. The new grid will start at the * same location as the image and will have the same size. 
* * <p>This method does nothing if the given domain already has an extent; * it does not verify that the extent is consistent with image size. * This verification should be done by the caller.</p> * * @param domain the domain to complete. May be {@code null}. * @param bounds image or raster bounds (cannot be {@code null}). * @return the potentially completed domain (may be {@code null}). */ static GridGeometry addExtentIfAbsent(GridGeometry domain, final Rectangle bounds) { if (domain == null) { GridExtent extent = new GridExtent(bounds); domain = new GridGeometry(extent, PixelInCell.CELL_CENTER, null, null); } else if (!domain.isDefined(GridGeometry.EXTENT)) { final int dimension = domain.getDimension(); if (dimension >= BIDIMENSIONAL) { CoordinateReferenceSystem crs = null; if (domain.isDefined(GridGeometry.CRS)) { crs = domain.getCoordinateReferenceSystem(); } final GridExtent extent = createExtent(dimension, bounds, crs); if (domain.isDefined(GridGeometry.GRID_TO_CRS)) try { domain = new GridGeometry(domain, extent, null); } catch (TransformException e) { throw new IllegalGridGeometryException(e); // Should never happen. } else { domain = new GridGeometry(extent, domain.envelope, GridOrientation.HOMOTHETY); } } } return domain; } /** * Creates a grid extent with the low and high coordinates of the given image bounds. * The coordinate reference system is used for extracting grid axis names, in particular * the {@link DimensionNameType#VERTICAL} and {@link DimensionNameType#TIME} dimensions. * The {@link DimensionNameType#COLUMN} and {@link DimensionNameType#ROW} dimensions can * not be inferred from CRS analysis; they are added from knowledge that we have an image. * * @param dimension number of dimensions. * @param bounds bounds of the image for which to create a grid extent. * @param crs coordinate reference system, or {@code null} if none. 
*/ private static GridExtent createExtent(final int dimension, final Rectangle bounds, final CoordinateReferenceSystem crs) { final var low = new long[dimension]; final var high = new long[dimension]; low [0] = bounds.x; low [1] = bounds.y; high[0] = bounds.width + low[0] - 1; // Inclusive. high[1] = bounds.height + low[1] - 1; DimensionNameType[] axisTypes = GridExtent.typeFromAxes(crs, dimension); if (axisTypes == null) { axisTypes = new DimensionNameType[dimension]; } if (!ArraysExt.contains(axisTypes, DimensionNameType.COLUMN)) axisTypes[0] = DimensionNameType.COLUMN; if (!ArraysExt.contains(axisTypes, DimensionNameType.ROW)) axisTypes[1] = DimensionNameType.ROW; return new GridExtent(axisTypes, low, high, true); } /** * Verifies that the domain is consistent with image size. * We do not verify image location; it can be anywhere. */ private static void verifyImageSize(final GridExtent extent, final RenderedImage data, final int[] imageAxes) { for (int i=0; i<BIDIMENSIONAL; i++) { final int imageSize = (i == 0) ? data.getWidth() : data.getHeight(); final long gridSize = extent.getSize(imageAxes[i]); if (imageSize != gridSize) { throw new IllegalGridGeometryException(Resources.format(Resources.Keys.MismatchedImageSize_3, i, imageSize, gridSize)); } } } /** * If the sample dimensions are null, creates default sample dimensions with default names. * The default names are "gray", "red, green, blue" or "cyan, magenta, yellow" if the color * model is identified as such, or numbers if the color model is not recognized. * * @param range the list of sample dimensions, potentially null. * @param data the image for which to build sample dimensions, or {@code null}. * @param numBands the number of bands in the given image, or 0 if none. * @return the given list of sample dimensions if it was non-null, or a default list otherwise. */ static List<? extends SampleDimension> defaultIfAbsent(List<? 
extends SampleDimension> range, final RenderedImage data, final int numBands) { if (range == null) { final short[] names; if (data != null) { names = ImageUtilities.bandNames(data.getColorModel(), data.getSampleModel()); } else { names = ArraysExt.EMPTY_SHORT; } final SampleDimension[] sd = new SampleDimension[numBands]; final NameFactory factory = DefaultNameFactory.provider(); for (int i=0; i<numBands; i++) { final InternationalString name; final short k; if (i < names.length && (k = names[i]) != 0) { name = Vocabulary.formatInternational(k); } else { name = Vocabulary.formatInternational(Vocabulary.Keys.Band_1, i+1); } sd[i] = new SampleDimension(factory.createLocalName(null, name), null, List.of()); } range = Arrays.asList(sd); } return range; } /** * Verifies that the number of bands in the image is equal to the number of sample dimensions. * The number of bands is fetched from the sample model, which in theory shall never be null. * However, this class has a little bit of tolerance to missing sample model. * It may happen when the image is used only as a matrix storage. */ private static void verifyBandCount(final List<? extends SampleDimension> range, final RenderedImage data) { if (range != null) { final SampleModel sm = data.getSampleModel(); if (sm != null) { final int nb = sm.getNumBands(); final int ns = range.size(); if (nb != ns) { throw new IllegalArgumentException(Resources.format(Resources.Keys.MismatchedBandCount_2, nb, ns)); } } } } /** * Returns the constant identifying the primitive type used for storing sample values. */ @Override final DataType getBandType() { return DataType.forBands(data); } /** * Returns the two-dimensional part of this grid geometry. * If the {@linkplain #getGridGeometry() complete geometry} is already two-dimensional, * then this method returns the same geometry. Otherwise it returns a geometry for the two first * axes having a {@linkplain GridExtent#getSize(int) size} greater than 1 in the grid envelope. 
* Note that those axes are guaranteed to appear in the same order as in the complete geometry. * * @return the two-dimensional part of the grid geometry. * * @see #getGridGeometry() * @see GridGeometry#selectDimensions(int[]) */ public GridGeometry getGridGeometry2D() { GridGeometry g = gridGeometry2D.get(); if (g == null) { g = gridGeometry.selectDimensions(xDimension, yDimension); if (!gridGeometry2D.compareAndSet(null, g)) { GridGeometry other = gridGeometry2D.get(); if (other != null) return other; } } return g; } /** * Creates a grid coverage that contains real values or sample values, * depending if {@code converted} is {@code true} or {@code false} respectively. * This method is invoked by the default implementation of {@link #forConvertedValues(boolean)} * when first needed. * * @param converted {@code true} for a coverage containing converted values, * or {@code false} for a coverage containing packed values. * @return a coverage containing converted or packed values, depending on {@code converted} argument value. */ @Override protected GridCoverage createConvertedValues(final boolean converted) { try { final List<SampleDimension> sources = getSampleDimensions(); final List<SampleDimension> targets = new ArrayList<>(sources.size()); final MathTransform1D[] converters = ConvertedGridCoverage.converters(sources, targets, converted); return (converters == null) ? this : new GridCoverage2D(this, targets, converters, converted); } catch (NoninvertibleTransformException e) { throw new CannotEvaluateException(e.getMessage(), e); } } /** * Creates a new function for computing or interpolating sample values at given locations. * * <h4>Multi-threading</h4> * {@code Evaluator}s are not thread-safe. For computing sample values concurrently, * a new {@code Evaluator} instance should be created for each thread. * * @since 1.1 */ @Override public Evaluator evaluator() { return new PixelAccessor(); } /** * Implementation of evaluator returned by {@link #evaluator()}. 
*/ private final class PixelAccessor extends DefaultEvaluator { /** * Creates a new evaluator for the enclosing coverage. */ PixelAccessor() { super(GridCoverage2D.this); } /** * Returns a sequence of double values for a given point in the coverage. * The CRS of the given point may be any coordinate reference system, * or {@code null} for the same CRS as the coverage. */ @Override public double[] apply(final DirectPosition point) throws CannotEvaluateException { try { final FractionalGridCoordinates gc = toGridPosition(point); try { final int x = toIntExact(addExact(gc.getCoordinateValue(xDimension), gridToImageX)); final int y = toIntExact(addExact(gc.getCoordinateValue(yDimension), gridToImageY)); return evaluate(data, x, y); } catch (ArithmeticException | IndexOutOfBoundsException | DisjointExtentException ex) { if (isNullIfOutside()) { return null; } throw (PointOutsideCoverageException) new PointOutsideCoverageException( gc.pointOutsideCoverage(gridGeometry.extent)).initCause(ex); } } catch (PointOutsideCoverageException ex) { throw ex; } catch (RuntimeException | FactoryException | TransformException ex) { throw new CannotEvaluateException(ex.getMessage(), ex); } } } /** * Returns a grid data region as a rendered image. The {@code sliceExtent} argument * specifies the area of interest and may be {@code null} for requesting the whole image. * The coordinates given by {@link RenderedImage#getMinX()} and {@link RenderedImage#getMinY() getMinY()} * will be the image location <em>relative to</em> the location specified in {@code sliceExtent} * {@linkplain GridExtent#getLow(int) low coordinates} (see super-class javadoc for more discussion). 
* The {@linkplain RenderedImage#getWidth() image width} and {@linkplain RenderedImage#getHeight() height} will be * the {@code sliceExtent} {@linkplain GridExtent#getSize(int) sizes} if this method can honor exactly the request, * but this method is free to return a smaller or larger image if doing so reduce the number of data to create or copy. * This implementation returns a view as much as possible, without copying sample values. * * @param sliceExtent area of interest, or {@code null} for the whole image. * @return the grid slice as a rendered image. Image location is relative to {@code sliceExtent}. * @throws MismatchedDimensionException if the given extent does not have the same number of dimensions as this coverage. * @throws DisjointExtentException if the given extent does not intersect this grid coverage. * @throws CannotEvaluateException if this method cannot produce the rendered image for another reason. * * @see BufferedImage#getSubimage(int, int, int, int) */ @Override @SuppressWarnings("AssertWithSideEffects") public RenderedImage render(GridExtent sliceExtent) throws CannotEvaluateException { final GridExtent extent = gridGeometry.extent; if (sliceExtent == null) { if (extent == null || (data.getMinX() == 0 && data.getMinY() == 0)) { return data; } sliceExtent = extent; } else { final int expected = gridGeometry.getDimension(); final int dimension = sliceExtent.getDimension(); if (expected != dimension) { throw new MismatchedDimensionException(Errors.format( Errors.Keys.MismatchedDimension_3, "sliceExtent", expected, dimension)); } } if (extent != null) { final int n = min(sliceExtent.getDimension(), extent.getDimension()); for (int i=0; i<n; i++) { if (sliceExtent.getHigh(i) < extent.getLow(i) || sliceExtent.getLow(i) > extent.getHigh(i)) { throw new DisjointExtentException(extent, sliceExtent, i); } } } try { /* * Convert the coordinates from this grid coverage coordinate system to the image coordinate system. 
* The coverage coordinates may require 64 bits integers, but after translation the (x,y) coordinates * should be in 32 bits integers range. Do not cast to 32 bits now however, this will be done later. */ final long xmin = addExact(sliceExtent.getLow (xDimension), gridToImageX); final long ymin = addExact(sliceExtent.getLow (yDimension), gridToImageY); final long xmax = addExact(sliceExtent.getHigh(xDimension), gridToImageX); // Inclusive final long ymax = addExact(sliceExtent.getHigh(yDimension), gridToImageY); /* * BufferedImage.getSubimage() returns a new image with upper-left coordinate at (0,0), * which is exactly what this method contract is requesting provided that the requested * upper-left point is inside the image. */ if (data instanceof BufferedImage) { var result = (BufferedImage) data; /* * BufferedImage origin should be (0, 0). But for consistency with image API, * we consider it as variable. */ final long ix = result.getMinX(); final long iy = result.getMinY(); if (xmin >= ix && ymin >= iy) { final int width = result.getWidth(); final int height = result.getHeight(); /* * Result of `ix + width` requires at most 33 bits for any `ix` value (same for y axis). * Subtractions by `xmin` and `ymin` never overflow if `ix` and `iy` are zero or positive, * which should always be the case with BufferedImage. The +1 is applied after subtraction * instead of on `xmax` and `ymax` for avoiding overflow, since the result of `min(…)` * uses at most 33 bits. */ final int nx = toIntExact(min(xmax, ix + width - 1) - xmin + 1); final int ny = toIntExact(min(ymax, iy + height - 1) - ymin + 1); if ((xmin | ymin) != 0 || nx != width || ny != height) { result = result.getSubimage(toIntExact(xmin), toIntExact(ymin), nx, ny); } /* * Workaround for https://bugs.openjdk.java.net/browse/JDK-8166038 * If BufferedImage cannot be used, fallback on ReshapedImage * at the cost of returning an image larger than necessary. * This workaround can be removed on JDK17. 
*/ if (org.apache.sis.image.internal.shared.TilePlaceholder.PENDING_JDK_FIX) { if (result.getTileGridXOffset() == ix && result.getTileGridYOffset() == iy) { return result; } } } } /* * Return the backing image almost as-is (with potentially just a wrapper) for avoiding to copy data. * As per method contract, we shall set the (x,y) location to the difference between requested region * and actual region of the returned image. For example if the user requested an image starting at * (5,5) but the image to return starts at (1,1), then we need to set its location to (-4,-4). * * Note: we could do a special case when the result has only one tile and create a BufferedImage * with Raster.createChild(…), but that would force us to invoke RenderedImage.getTile(…) which * may force data loading earlier than desired. */ final var result = new ReshapedImage(data, xmin, ymin, xmax, ymax); return result.isIdentity() ? result.source : result; } catch (ArithmeticException e) { throw new CannotEvaluateException(e.getMessage(), e); } } /** * Appends a "data layout" branch (if it exists) to the tree representation of this coverage. * That branch will be inserted between "coverage domain" and "sample dimensions" branches. * * @param root root of the tree where to add a branch. * @param vocabulary localized resources for vocabulary. * @param column the single column where to write texts. 
*/ @Debug @Override void appendDataLayout(final TreeTable.Node root, final Vocabulary vocabulary, final TableColumn<CharSequence> column) { final TreeTable.Node branch = root.newChild(); branch.setValue(column, vocabulary.getString(Vocabulary.Keys.ImageLayout)); final var nf = NumberFormat.getIntegerInstance(vocabulary.getLocale()); final var pos = new FieldPosition(0); final var buffer = new StringBuffer(); write: for (int item=0; ; item++) try { switch (item) { case 0: { vocabulary.appendLabel(Vocabulary.Keys.Origin, buffer); nf.format(data.getMinX(), buffer.append(' '), pos); nf.format(data.getMinY(), buffer.append(", "), pos); break; } case 1: { final int tx = data.getTileWidth(); final int ty = data.getTileHeight(); if (tx == data.getWidth() && ty == data.getHeight()) continue; vocabulary.appendLabel(Vocabulary.Keys.TileSize, buffer); nf.format(tx, buffer.append( ' ' ), pos); nf.format(ty, buffer.append(" × "), pos); break; } case 2: { final String type = ImageUtilities.getDataTypeName(data.getSampleModel()); if (type == null) continue; vocabulary.appendLabel(Vocabulary.Keys.DataType, buffer); buffer.append(' ').append(type); break; } case 3: { final short t = ImageUtilities.getTransparencyDescription(data.getColorModel()); if (t != 0) { final String desc = Resources.forLocale(vocabulary.getLocale()).getString(t); branch.newChild().setValue(column, desc); } continue; } default: break write; } branch.newChild().setValue(column, buffer.toString()); buffer.setLength(0); } catch (IOException e) { throw new UncheckedIOException(e); // Should never happen since we are writing to StringBuilder. } } }
google/error-prone
35,218
core/src/test/java/com/google/errorprone/bugpatterns/UnnecessaryDefaultInEnumSwitchTest.java
/* * Copyright 2017 The Error Prone Authors. * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.errorprone.bugpatterns; import static com.google.errorprone.BugCheckerRefactoringTestHelper.TestMode.TEXT_MATCH; import com.google.errorprone.BugCheckerRefactoringTestHelper; import com.google.errorprone.CompilationTestHelper; import org.junit.Test; import org.junit.runner.RunWith; import org.junit.runners.JUnit4; /** {@link UnnecessaryDefaultInEnumSwitch}Test */ @RunWith(JUnit4.class) public class UnnecessaryDefaultInEnumSwitchTest { private final CompilationTestHelper compilationHelper = CompilationTestHelper.newInstance(UnnecessaryDefaultInEnumSwitch.class, getClass()); private final BugCheckerRefactoringTestHelper refactoringTestHelper = BugCheckerRefactoringTestHelper.newInstance(UnnecessaryDefaultInEnumSwitch.class, getClass()); @Test public void switchCannotComplete() { refactoringTestHelper .addInputLines( "in/Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; default: // This is a comment throw new AssertionError(c); } } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; } // This is a comment throw new AssertionError(c); } } """) .doTest(TEXT_MATCH); } @Test public void switchCannotCompleteUnrecognized() { refactoringTestHelper .addInputLines( "in/Test.java", 
""" class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; default: // This is a comment throw new AssertionError(c); } } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; case UNRECOGNIZED: break; } // This is a comment throw new AssertionError(c); } } """) .doTest(TEXT_MATCH); } @Test public void emptyDefault() { refactoringTestHelper .addInputLines( "in/Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; default: } return false; } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; } return false; } } """) .doTest(); } @Test public void emptyDefaultUnrecognized() { refactoringTestHelper .addInputLines( "in/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; default: } return false; } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; case UNRECOGNIZED: // continue below } return false; } } """) .doTest(); } @Test public void defaultBreak() { refactoringTestHelper .addInputLines( "in/Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; default: break; } return false; } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; } return false; } } """) .doTest(); } @Test public void defaultBreakUnrecognized() { refactoringTestHelper .addInputLines( 
"in/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; default: break; } return false; } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; case UNRECOGNIZED: // continue below } return false; } } """) .doTest(); } @Test public void completes_noUnassignedVars_priorCaseExits() { refactoringTestHelper .addInputLines( "in/Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { switch (c) { case ONE: case TWO: break; case THREE: return true; default: throw new AssertionError(c); } return false; } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { switch (c) { case ONE: case TWO: break; case THREE: return true; } return false; } } """) .doTest(); } @Test public void completes_noUnassignedVars_priorCaseExitsUnrecognized() { refactoringTestHelper .addInputLines( "in/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: break; case THREE: return true; default: throw new AssertionError(c); } return false; } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: break; case THREE: return true; case UNRECOGNIZED: throw new AssertionError(c); } return false; } } """) .doTest(TEXT_MATCH); } @Test public void completes_noUnassignedVars_priorCaseDoesntExit() { refactoringTestHelper .addInputLines( "in/Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { switch (c) { case ONE: case TWO: return true; case THREE: default: // This is a comment System.out.println("Test"); } return false; } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean 
m(Case c) { switch (c) { case ONE: case TWO: return true; case THREE: // This is a comment System.out.println("Test"); } return false; } } """) .doTest(); } @Test public void completes_noUnassignedVars_priorCaseDoesntExitUnrecognized() { refactoringTestHelper .addInputLines( "in/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: return true; case THREE: default: // This is a comment System.out.println("Test"); } return false; } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: return true; case THREE: case UNRECOGNIZED: // This is a comment System.out.println("Test"); } return false; } } """) .doTest(); } @Test public void completes_unassignedVars() { compilationHelper .addSourceLines( "Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { int x; switch (c) { case ONE: case TWO: x = 1; break; case THREE: x = 2; break; default: x = 3; } return x == 1; } } """) .doTest(); } @Test public void completes_unassignedVarsUnrecognized() { compilationHelper .addSourceLines( "Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { int x; switch (c) { case ONE: case TWO: x = 1; break; case THREE: x = 2; break; default: x = 3; } return x == 1; } } """) .doTest(); } @Test public void notExhaustive() { compilationHelper .addSourceLines( "Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { switch (c) { case ONE: case TWO: return true; default: throw new AssertionError(c); } } } """) .doTest(); } @Test public void notExhaustiveUnrecognized() { refactoringTestHelper .addInputLines( "Test.java", """ class Test { enum Case { ONE, TWO, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: return true; default: throw new AssertionError(c); } } } """) .addOutputLines( "Test.java", """ class Test { enum Case { 
ONE, TWO, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: return true; case UNRECOGNIZED: break; } throw new AssertionError(c); } } """) .doTest(TEXT_MATCH); } @Test public void notExhaustive2() { refactoringTestHelper .addInputLines( "Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(boolean f, Case c) { if (f) { switch (c) { case ONE: case TWO: case THREE: return true; default: return false; } } else { return false; } } } """) .addOutputLines( "Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(boolean f, Case c) { if (f) { switch (c) { case ONE: case TWO: case THREE: return true; } return false; } else { return false; } } } """) .doTest(); } @Test public void notExhaustive2Unrecognized() { refactoringTestHelper .addInputLines( "Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(boolean f, Case c) { if (f) { switch (c) { case ONE: case TWO: case THREE: return true; default: return false; } } else { return false; } } } """) .addOutputLines( "Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(boolean f, Case c) { if (f) { switch (c) { case ONE: case TWO: case THREE: return true; case UNRECOGNIZED: break; } return false; } else { return false; } } } """) .doTest(); } @Test public void defaultForSkew_switchStatement() { compilationHelper .addSourceLines( "Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { switch (c) { case ONE: case TWO: case THREE: return true; default: // in case of library skew return false; } } boolean o(Case c) { switch (c) { // in case of library skew default: return false; case ONE: case TWO: case THREE: return true; } } } """) .doTest(); } @Test public void defaultForSkew_switchStatement_body() { compilationHelper .addSourceLines( "Test.java", """ class Test { enum Case { ONE, TWO, THREE } boolean m(Case c) { switch (c) { case ONE, TWO, THREE -> { return true; } // in case of library skew default 
-> { return false; } } } } """) .doTest(); } @Test public void defaultForSkew_switchStatement_noFollowingStatement() { compilationHelper .addSourceLines( "Test.java", """ class Test { enum Case { ONE, TWO, THREE } void m(Case c) { switch (c) { case ONE: case TWO: case THREE: break; default: // skew } } } """) .doTest(); } @Test public void defaultForSkew_switchExpression() { compilationHelper .addSourceLines( "Test.java", """ class Test { enum Case { ONE, TWO, THREE } void m(Case c) { boolean unused; unused = switch (c) { case ONE, TWO -> true; case THREE -> false; // present for skew default -> false; }; unused = switch (c) { case ONE, TWO -> true; case THREE -> false; default -> // present for skew false; }; unused = switch (c) { // present for skew default -> false; case ONE, TWO -> true; case THREE -> false; }; } } """) .doTest(); } @Test public void unrecognizedIgnore() { refactoringTestHelper .addInputLines( "Test.java", """ class Test { enum Case { ONE, TWO, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: return true; default: throw new AssertionError(c); } } } """) .expectUnchanged() .doTest(TEXT_MATCH); } @Test public void defaultAboveCaseUnrecognized() { refactoringTestHelper .addInputLines( "in/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: return true; default: case THREE: // This is a comment System.out.println("Test"); } return false; } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } boolean m(Case c) { switch (c) { case ONE: case TWO: return true; case UNRECOGNIZED: case THREE: // This is a comment System.out.println("Test"); } return false; } } """) .doTest(); } @Test public void messageMovedAssertion() { compilationHelper .addSourceLines( "in/Test.java", """ class Test { enum Case { ONE } boolean m(Case c) { switch (c) { case ONE: return true; // BUG: Diagnostic contains: after the switch statement default: 
throw new AssertionError(c); } } } """) .doTest(); } @Test public void messageRemovedAssertion() { compilationHelper .addSourceLines( "in/Test.java", """ class Test { enum Case { ONE } void m(Case c) { int i = 0; switch (c) { case ONE: i = 1; break; // BUG: Diagnostic contains: case can be omitted default: throw new AssertionError(); } } } """) .doTest(); } @Test public void switchCompletesUnrecognized() { refactoringTestHelper .addInputLines( "in/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } void m(Case c) { switch (c) { case ONE: break; case TWO: break; case THREE: break; default: // This is a comment throw new AssertionError(c); } } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, THREE, UNRECOGNIZED } void m(Case c) { switch (c) { case ONE: break; case TWO: break; case THREE: break; case UNRECOGNIZED: // This is a comment throw new AssertionError(c); } } } """) .doTest(TEXT_MATCH); } @Test public void messages() { compilationHelper .addSourceLines( "Test.java", """ class Test { enum NormalEnum { A, B } enum ProtoEnum { ONE, TWO, UNRECOGNIZED } void normal(NormalEnum e) { switch (e) { case A: case B: // BUG: Diagnostic contains: default case can be omitted default: break; } } void proto(ProtoEnum e) { switch (e) { case ONE: case TWO: // BUG: Diagnostic contains: UNRECOGNIZED default: break; } } } """) .doTest(); } @Test public void defaultCaseKindRule() { refactoringTestHelper .addInputLines( "Test.java", """ class Test { enum Case { ONE, TWO } void m(Case c) { switch (c) { case ONE -> {} case TWO -> {} default -> {} } } } """) .addOutputLines( "Test.java", """ class Test { enum Case { ONE, TWO } void m(Case c) { switch (c) { case ONE -> {} case TWO -> {} } } } """) .doTest(); } @Test public void defaultCaseKindRule_initialisation() { compilationHelper .addSourceLines( "Test.java", """ class Test { enum Case { ONE, TWO } void m(Case c) { int x; switch (c) { case ONE -> x = 1; case TWO -> x = 2; // 
Removing this would not compile. default -> throw new AssertionError(); } System.out.println(x); } } """) .doTest(); } @Test public void unrecognizedCaseKindRule() { // NOTE(ghm): This test is unhappy on 17 for test frameworky reasons. refactoringTestHelper .addInputLines( "Test.java", """ class Test { enum Case { ONE, TWO, UNRECOGNIZED } void m(Case c) { switch (c) { case ONE -> {} case TWO -> {} default -> {} } } } """) .addOutputLines( "Test.java", """ class Test { enum Case { ONE, TWO, UNRECOGNIZED } void m(Case c) { switch (c) { case ONE -> {} case TWO -> {} case UNRECOGNIZED -> {} } } } """) .doTest(); } @Test public void unrecognizedCaseKindRule_initialization() { compilationHelper .addSourceLines( "Test.java", """ class Test { enum Case { ONE, TWO, UNRECOGNIZED } void m(Case c) { int x; switch (c) { case ONE -> x = 1; case TWO -> x = 2; // Removing this would not compile. default -> throw new AssertionError(); } System.out.println(x); } } """) .doTest(); } @Test public void multipleLabels() { refactoringTestHelper .addInputLines( "Test.java", """ class Test { enum Type { FOO, BAR, BAZ, } public static void main(String[] args) { var type = Type.valueOf(args[0]); switch (type) { case FOO -> { System.out.println("Hi foo"); } case BAR, BAZ -> {} default -> throw new AssertionError(type); } } } """) .addOutputLines( "Test.java", """ class Test { enum Type { FOO, BAR, BAZ, } public static void main(String[] args) { var type = Type.valueOf(args[0]); switch (type) { case FOO -> { System.out.println("Hi foo"); } case BAR, BAZ -> {} } } } """) .doTest(); } @Test public void expressionSwitch() { refactoringTestHelper .addInputLines( "in/Test.java", """ class Test { enum Case { ONE, TWO, } boolean m(Case c) { return switch (c) { case ONE -> true; case TWO -> false; default -> throw new AssertionError(); }; } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, } boolean m(Case c) { return switch (c) { case ONE -> true; case TWO -> false; }; } 
} """) .doTest(); } @Test public void expressionSwitchUnrecognized() { refactoringTestHelper .addInputLines( "in/Test.java", """ class Test { enum Case { ONE, TWO, UNRECOGNIZED } boolean m(Case c) { return switch (c) { case ONE -> true; case TWO -> false; default -> throw new AssertionError(); }; } } """) .addOutputLines( "out/Test.java", """ class Test { enum Case { ONE, TWO, UNRECOGNIZED } boolean m(Case c) { return switch (c) { case ONE -> true; case TWO -> false; case UNRECOGNIZED -> throw new AssertionError(); }; } } """) .doTest(); } }
googleapis/google-cloud-java
35,984
java-essential-contacts/proto-google-cloud-essential-contacts-v1/src/main/java/com/google/cloud/essentialcontacts/v1/UpdateContactRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/essentialcontacts/v1/service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.essentialcontacts.v1; /** * * * <pre> * Request message for the UpdateContact method. * </pre> * * Protobuf type {@code google.cloud.essentialcontacts.v1.UpdateContactRequest} */ public final class UpdateContactRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.essentialcontacts.v1.UpdateContactRequest) UpdateContactRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateContactRequest.newBuilder() to construct. 
private UpdateContactRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateContactRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateContactRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.essentialcontacts.v1.Service .internal_static_google_cloud_essentialcontacts_v1_UpdateContactRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.essentialcontacts.v1.Service .internal_static_google_cloud_essentialcontacts_v1_UpdateContactRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.essentialcontacts.v1.UpdateContactRequest.class, com.google.cloud.essentialcontacts.v1.UpdateContactRequest.Builder.class); } private int bitField0_; public static final int CONTACT_FIELD_NUMBER = 2; private com.google.cloud.essentialcontacts.v1.Contact contact_; /** * * * <pre> * Required. The contact resource to replace the existing saved contact. Note: * the email address of the contact cannot be modified. * </pre> * * <code> * .google.cloud.essentialcontacts.v1.Contact contact = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the contact field is set. */ @java.lang.Override public boolean hasContact() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The contact resource to replace the existing saved contact. Note: * the email address of the contact cannot be modified. * </pre> * * <code> * .google.cloud.essentialcontacts.v1.Contact contact = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The contact. */ @java.lang.Override public com.google.cloud.essentialcontacts.v1.Contact getContact() { return contact_ == null ? 
com.google.cloud.essentialcontacts.v1.Contact.getDefaultInstance() : contact_; } /** * * * <pre> * Required. The contact resource to replace the existing saved contact. Note: * the email address of the contact cannot be modified. * </pre> * * <code> * .google.cloud.essentialcontacts.v1.Contact contact = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.essentialcontacts.v1.ContactOrBuilder getContactOrBuilder() { return contact_ == null ? com.google.cloud.essentialcontacts.v1.Contact.getDefaultInstance() : contact_; } public static final int UPDATE_MASK_FIELD_NUMBER = 3; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Optional. The update mask applied to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. The update mask applied to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Optional. The update mask applied to the resource. 
For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(2, getContact()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(3, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getContact()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(3, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.essentialcontacts.v1.UpdateContactRequest)) { return super.equals(obj); } com.google.cloud.essentialcontacts.v1.UpdateContactRequest other = (com.google.cloud.essentialcontacts.v1.UpdateContactRequest) obj; if (hasContact() != other.hasContact()) return false; if (hasContact()) { if (!getContact().equals(other.getContact())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) 
return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasContact()) { hash = (37 * hash) + CONTACT_FIELD_NUMBER; hash = (53 * hash) + getContact().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public 
static Builder newBuilder( com.google.cloud.essentialcontacts.v1.UpdateContactRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for the UpdateContact method. * </pre> * * Protobuf type {@code google.cloud.essentialcontacts.v1.UpdateContactRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.essentialcontacts.v1.UpdateContactRequest) com.google.cloud.essentialcontacts.v1.UpdateContactRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.essentialcontacts.v1.Service .internal_static_google_cloud_essentialcontacts_v1_UpdateContactRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.essentialcontacts.v1.Service .internal_static_google_cloud_essentialcontacts_v1_UpdateContactRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.essentialcontacts.v1.UpdateContactRequest.class, com.google.cloud.essentialcontacts.v1.UpdateContactRequest.Builder.class); } // Construct using com.google.cloud.essentialcontacts.v1.UpdateContactRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getContactFieldBuilder(); 
getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; contact_ = null; if (contactBuilder_ != null) { contactBuilder_.dispose(); contactBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.essentialcontacts.v1.Service .internal_static_google_cloud_essentialcontacts_v1_UpdateContactRequest_descriptor; } @java.lang.Override public com.google.cloud.essentialcontacts.v1.UpdateContactRequest getDefaultInstanceForType() { return com.google.cloud.essentialcontacts.v1.UpdateContactRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.essentialcontacts.v1.UpdateContactRequest build() { com.google.cloud.essentialcontacts.v1.UpdateContactRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.essentialcontacts.v1.UpdateContactRequest buildPartial() { com.google.cloud.essentialcontacts.v1.UpdateContactRequest result = new com.google.cloud.essentialcontacts.v1.UpdateContactRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.essentialcontacts.v1.UpdateContactRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.contact_ = contactBuilder_ == null ? contact_ : contactBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.essentialcontacts.v1.UpdateContactRequest) { return mergeFrom((com.google.cloud.essentialcontacts.v1.UpdateContactRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.essentialcontacts.v1.UpdateContactRequest other) { if (other == com.google.cloud.essentialcontacts.v1.UpdateContactRequest.getDefaultInstance()) return this; if (other.hasContact()) { mergeContact(other.getContact()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { 
throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 18: { input.readMessage(getContactFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 18 case 26: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.essentialcontacts.v1.Contact contact_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.essentialcontacts.v1.Contact, com.google.cloud.essentialcontacts.v1.Contact.Builder, com.google.cloud.essentialcontacts.v1.ContactOrBuilder> contactBuilder_; /** * * * <pre> * Required. The contact resource to replace the existing saved contact. Note: * the email address of the contact cannot be modified. * </pre> * * <code> * .google.cloud.essentialcontacts.v1.Contact contact = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the contact field is set. */ public boolean hasContact() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The contact resource to replace the existing saved contact. Note: * the email address of the contact cannot be modified. * </pre> * * <code> * .google.cloud.essentialcontacts.v1.Contact contact = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The contact. */ public com.google.cloud.essentialcontacts.v1.Contact getContact() { if (contactBuilder_ == null) { return contact_ == null ? 
com.google.cloud.essentialcontacts.v1.Contact.getDefaultInstance() : contact_; } else { return contactBuilder_.getMessage(); } } /** * * * <pre> * Required. The contact resource to replace the existing saved contact. Note: * the email address of the contact cannot be modified. * </pre> * * <code> * .google.cloud.essentialcontacts.v1.Contact contact = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setContact(com.google.cloud.essentialcontacts.v1.Contact value) { if (contactBuilder_ == null) { if (value == null) { throw new NullPointerException(); } contact_ = value; } else { contactBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The contact resource to replace the existing saved contact. Note: * the email address of the contact cannot be modified. * </pre> * * <code> * .google.cloud.essentialcontacts.v1.Contact contact = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setContact( com.google.cloud.essentialcontacts.v1.Contact.Builder builderForValue) { if (contactBuilder_ == null) { contact_ = builderForValue.build(); } else { contactBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The contact resource to replace the existing saved contact. Note: * the email address of the contact cannot be modified. 
* </pre> * * <code> * .google.cloud.essentialcontacts.v1.Contact contact = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeContact(com.google.cloud.essentialcontacts.v1.Contact value) { if (contactBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && contact_ != null && contact_ != com.google.cloud.essentialcontacts.v1.Contact.getDefaultInstance()) { getContactBuilder().mergeFrom(value); } else { contact_ = value; } } else { contactBuilder_.mergeFrom(value); } if (contact_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The contact resource to replace the existing saved contact. Note: * the email address of the contact cannot be modified. * </pre> * * <code> * .google.cloud.essentialcontacts.v1.Contact contact = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearContact() { bitField0_ = (bitField0_ & ~0x00000001); contact_ = null; if (contactBuilder_ != null) { contactBuilder_.dispose(); contactBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The contact resource to replace the existing saved contact. Note: * the email address of the contact cannot be modified. * </pre> * * <code> * .google.cloud.essentialcontacts.v1.Contact contact = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.essentialcontacts.v1.Contact.Builder getContactBuilder() { bitField0_ |= 0x00000001; onChanged(); return getContactFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The contact resource to replace the existing saved contact. Note: * the email address of the contact cannot be modified. * </pre> * * <code> * .google.cloud.essentialcontacts.v1.Contact contact = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.essentialcontacts.v1.ContactOrBuilder getContactOrBuilder() { if (contactBuilder_ != null) { return contactBuilder_.getMessageOrBuilder(); } else { return contact_ == null ? 
com.google.cloud.essentialcontacts.v1.Contact.getDefaultInstance() : contact_; } } /** * * * <pre> * Required. The contact resource to replace the existing saved contact. Note: * the email address of the contact cannot be modified. * </pre> * * <code> * .google.cloud.essentialcontacts.v1.Contact contact = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.essentialcontacts.v1.Contact, com.google.cloud.essentialcontacts.v1.Contact.Builder, com.google.cloud.essentialcontacts.v1.ContactOrBuilder> getContactFieldBuilder() { if (contactBuilder_ == null) { contactBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.essentialcontacts.v1.Contact, com.google.cloud.essentialcontacts.v1.Contact.Builder, com.google.cloud.essentialcontacts.v1.ContactOrBuilder>( getContact(), getParentForChildren(), isClean()); contact_ = null; } return contactBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Optional. The update mask applied to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Optional. The update mask applied to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> * * @return The updateMask. 
*/ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Optional. The update mask applied to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The update mask applied to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Optional. The update mask applied to the resource. 
For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Optional. The update mask applied to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Optional. The update mask applied to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Optional. The update mask applied to the resource. 
For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Optional. The update mask applied to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask * </pre> * * <code>.google.protobuf.FieldMask update_mask = 3 [(.google.api.field_behavior) = OPTIONAL]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.essentialcontacts.v1.UpdateContactRequest) } // @@protoc_insertion_point(class_scope:google.cloud.essentialcontacts.v1.UpdateContactRequest) private static final com.google.cloud.essentialcontacts.v1.UpdateContactRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new 
com.google.cloud.essentialcontacts.v1.UpdateContactRequest(); } public static com.google.cloud.essentialcontacts.v1.UpdateContactRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateContactRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateContactRequest>() { @java.lang.Override public UpdateContactRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateContactRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateContactRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.essentialcontacts.v1.UpdateContactRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/jackrabbit
36,220
jackrabbit-jcr-server/src/main/java/org/apache/jackrabbit/server/io/DefaultHandler.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.jackrabbit.server.io; import org.apache.jackrabbit.JcrConstants; import org.apache.jackrabbit.commons.NamespaceHelper; import org.apache.jackrabbit.util.ISO9075; import org.apache.jackrabbit.util.Text; import org.apache.jackrabbit.webdav.DavException; import org.apache.jackrabbit.webdav.DavResource; import org.apache.jackrabbit.webdav.DavServletResponse; import org.apache.jackrabbit.webdav.jcr.JcrDavException; import org.apache.jackrabbit.webdav.xml.Namespace; import org.apache.jackrabbit.webdav.property.DavPropertyName; import org.apache.jackrabbit.webdav.property.DavProperty; import org.apache.jackrabbit.webdav.property.PropEntry; import org.apache.tika.metadata.Metadata; import org.apache.tika.metadata.TikaCoreProperties; import org.slf4j.Logger; import org.slf4j.LoggerFactory; import javax.jcr.Item; import javax.jcr.Node; import javax.jcr.NodeIterator; import javax.jcr.PathNotFoundException; import javax.jcr.Property; import javax.jcr.RepositoryException; import javax.jcr.PropertyIterator; import javax.jcr.Session; import javax.jcr.nodetype.PropertyDefinition; import java.io.IOException; import java.io.InputStream; import java.util.Calendar; import java.util.Date; import 
java.util.Map; import java.util.List; import java.util.HashMap; /** * <code>DefaultHandler</code> implements a simple IOHandler that creates 'file' * and 'folder' nodes. This handler will create the following nodes: * <ul> * <li>New <b>Collection</b>: creates a new node with the {@link #getCollectionNodeType() * collection nodetype}. The name of the node corresponds to the systemId * present on the import context.</li> * * <li>New <b>Non-Collection</b>: first creates a new node with the {@link #getNodeType() * non-collection nodetype}. The name of the node corresponds to the systemId * present on the import context. Below it creates a node with name * {@link JcrConstants#JCR_CONTENT jcr:content} and the nodetype specified * by {@link #getContentNodeType()}.</li> * </ul> * <p> * Import of the content:<br> * The content is imported to the {@link JcrConstants#JCR_DATA} property of the * content node. By default this handler will fail on a attempt to create/replace * a collection if {@link ImportContext#hasStream()} is <code>true</code>. * Subclasses therefore should provide their own {@link #importData(ImportContext, boolean, Node) * importData} method, that handles the data according their needs. 
*/ public class DefaultHandler implements IOHandler, PropertyHandler, CopyMoveHandler, DeleteHandler { private static Logger log = LoggerFactory.getLogger(DefaultHandler.class); private String collectionNodetype; private String defaultNodetype; private String contentNodetype; private IOManager ioManager; /** * Creates a new <code>DefaultHandler</code> with default nodetype definitions:<br> * <ul> * <li>Nodetype for Collection: {@link JcrConstants#NT_FOLDER nt:folder}</li> * <li>Nodetype for Non-Collection: {@link JcrConstants#NT_FILE nt:file}</li> * <li>Nodetype for Non-Collection content: {@link JcrConstants#NT_UNSTRUCTURED nt:unstructured}</li> * </ul> */ public DefaultHandler() { this(null); } /** * Creates a new <code>DefaultHandler</code> with default nodetype definitions:<br> * <ul> * <li>Nodetype for Collection: {@link JcrConstants#NT_FOLDER nt:folder}</li> * <li>Nodetype for Non-Collection: {@link JcrConstants#NT_FILE nt:file}</li> * <li>Nodetype for Non-Collection content: {@link JcrConstants#NT_UNSTRUCTURED nt:unstructured}</li> * </ul> * * @param ioManager the I/O manager */ public DefaultHandler(IOManager ioManager) { this(ioManager, JcrConstants.NT_FOLDER, JcrConstants.NT_FILE, // IMPORTANT NOTE: for webDAV compliance the default type // of the content node has been changed from nt:resource to // nt:unstructured JcrConstants.NT_UNSTRUCTURED); } /** * Creates a new <code>DefaultHandler</code>. Please note that the specified * nodetypes must match the definitions of the defaults. 
*/ public DefaultHandler(IOManager ioManager, String collectionNodetype, String defaultNodetype, String contentNodetype) { this.ioManager = ioManager; this.collectionNodetype = collectionNodetype; this.defaultNodetype = defaultNodetype; this.contentNodetype = contentNodetype; } /** * @see IOHandler#getIOManager() */ public IOManager getIOManager() { return ioManager; } /** * @see IOHandler#setIOManager(IOManager) */ public void setIOManager(IOManager ioManager) { this.ioManager = ioManager; } /** * @see IOHandler#getName() */ public String getName() { return getClass().getName(); } /** * @see IOHandler#canImport(ImportContext, boolean) */ public boolean canImport(ImportContext context, boolean isCollection) { if (context == null || context.isCompleted()) { return false; } Item contextItem = context.getImportRoot(); return contextItem != null && contextItem.isNode() && context.getSystemId() != null; } /** * @see IOHandler#canImport(ImportContext, DavResource) */ public boolean canImport(ImportContext context, DavResource resource) { if (resource == null) { return false; } return canImport(context, resource.isCollection()); } /** * @see IOHandler#importContent(ImportContext, boolean) */ public boolean importContent(ImportContext context, boolean isCollection) throws IOException { if (!canImport(context, isCollection)) { throw new IOException(getName() + ": Cannot import " + context.getSystemId()); } boolean success = false; try { Node contentNode = getContentNode(context, isCollection); success = importData(context, isCollection, contentNode); if (success) { success = importProperties(context, isCollection, contentNode); } } catch (RepositoryException e) { success = false; throw new IOException(e.getMessage()); } finally { // revert any changes made in case the import failed. 
if (!success) { try { context.getImportRoot().refresh(false); } catch (RepositoryException e) { throw new IOException(e.getMessage()); } } } return success; } /** * @see IOHandler#importContent(ImportContext, DavResource) */ public boolean importContent(ImportContext context, DavResource resource) throws IOException { if (!canImport(context, resource)) { throw new IOException(getName() + ": Cannot import " + context.getSystemId()); } return importContent(context, resource.isCollection()); } /** * Imports the data present on the import context to the specified content * node. */ protected boolean importData(ImportContext context, boolean isCollection, Node contentNode) throws IOException, RepositoryException { InputStream in = context.getInputStream(); if (in != null) { // NOTE: with the default folder-nodetype (nt:folder) no inputstream // is allowed. setting the property would therefore fail. if (isCollection) { return false; } try { contentNode.setProperty(JcrConstants.JCR_DATA, in); } finally { in.close(); } } // success if no data to import. return true; } /** * Imports the properties present on the specified context to the content * node. */ protected boolean importProperties(ImportContext context, boolean isCollection, Node contentNode) { try { // set mimeType property upon resource creation but don't modify // it on a subsequent PUT. In contrast to a PROPPATCH request, which // is handled by #importProperties(PropertyContext, boolean)} if (!contentNode.hasProperty(JcrConstants.JCR_MIMETYPE)) { contentNode.setProperty(JcrConstants.JCR_MIMETYPE, context.getMimeType()); } } catch (RepositoryException e) { // ignore: property may not be present on the node } try { // set encoding property upon resource creation but don't modify // it on a subsequent PUT. 
In contrast to a PROPPATCH request, which // is handled by #importProperties(PropertyContext, boolean)} if (!contentNode.hasProperty(JcrConstants.JCR_ENCODING)) { contentNode.setProperty(JcrConstants.JCR_ENCODING, context.getEncoding()); } } catch (RepositoryException e) { // ignore: property may not be present on the node } setLastModified(contentNode, context.getModificationTime()); return true; } /** * Retrieves/creates the node that will be used to import properties and * data. In case of a non-collection this includes and additional content node * to be created beside the 'file' node. * <p> * Please note: If the jcr:content node already exists and contains child * nodes, those will be removed in order to make sure, that the import * really replaces the existing content of the file-node. */ protected Node getContentNode(ImportContext context, boolean isCollection) throws RepositoryException { Node parentNode = (Node)context.getImportRoot(); String name = context.getSystemId(); if (parentNode.hasNode(name)) { parentNode = parentNode.getNode(name); } else { String ntName = (isCollection) ? getCollectionNodeType() : getNodeType(); parentNode = parentNode.addNode(name, ntName); } Node contentNode = null; if (isCollection) { contentNode = parentNode; } else { if (parentNode.hasNode(JcrConstants.JCR_CONTENT)) { contentNode = parentNode.getNode(JcrConstants.JCR_CONTENT); // check if nodetype is compatible (might be update of an existing file) if (contentNode.isNodeType(getContentNodeType()) || !forceCompatibleContentNodes()) { // remove all entries in the jcr:content since replacing content // includes properties (DefaultHandler) and nodes (e.g. 
ZipHandler) if (contentNode.hasNodes()) { NodeIterator it = contentNode.getNodes(); while (it.hasNext()) { it.nextNode().remove(); } } } else { contentNode.remove(); contentNode = null; } } if (contentNode == null) { // JCR-2070: Use the predefined content node type only // when the underlying repository allows it to be used if (parentNode.getPrimaryNodeType().canAddChildNode( JcrConstants.JCR_CONTENT, getContentNodeType())) { contentNode = parentNode.addNode( JcrConstants.JCR_CONTENT, getContentNodeType()); } else { contentNode = parentNode.addNode(JcrConstants.JCR_CONTENT); } } } return contentNode; } /** * Defines if content nodes should be replace if they don't have the * node type given by {@link #getCollectionNodeType()}. * * @return <code>true</code> if content nodes should be replaced. */ protected boolean forceCompatibleContentNodes() { return false; } /** * Returns true if the export root is a node and if it contains a child node * with name {@link JcrConstants#JCR_CONTENT jcr:content} in case this * export is not intended for a collection. * * @return true if the export root is a node. If the specified boolean parameter * is false (not a collection export) the given export root must contain a * child node with name {@link JcrConstants#JCR_CONTENT jcr:content}. * * @see IOHandler#canExport(ExportContext, boolean) */ public boolean canExport(ExportContext context, boolean isCollection) { if (context == null || context.isCompleted()) { return false; } Item exportRoot = context.getExportRoot(); boolean success = exportRoot != null && exportRoot.isNode(); if (success && !isCollection) { try { Node n = ((Node)exportRoot); success = n.hasNode(JcrConstants.JCR_CONTENT); } catch (RepositoryException e) { // should never occur. 
success = false; } } return success; } /** * @see IOHandler#canExport(ExportContext, DavResource) */ public boolean canExport(ExportContext context, DavResource resource) { if (resource == null) { return false; } return canExport(context, resource.isCollection()); } /** * Retrieves the content node that will be used for exporting properties and * data and calls the corresponding methods. * * @param context the export context * @param isCollection <code>true</code> if collection * @see #exportProperties(ExportContext, boolean, Node) * @see #exportData(ExportContext, boolean, Node) */ public boolean exportContent(ExportContext context, boolean isCollection) throws IOException { if (!canExport(context, isCollection)) { throw new IOException(getName() + ": Cannot export " + context.getExportRoot()); } try { Node contentNode = getContentNode(context, isCollection); exportProperties(context, isCollection, contentNode); if (context.hasStream()) { exportData(context, isCollection, contentNode); } // else: missing stream. ignore. return true; } catch (RepositoryException e) { // should never occur, since the proper structure of the content // node must be asserted in the 'canExport' call. throw new IOException(e.getMessage()); } } /** * Same as (@link IOHandler#exportContent(ExportContext, boolean)} where * the boolean values is defined by {@link DavResource#isCollection()}. 
* * @see IOHandler#exportContent(ExportContext, DavResource) */ public boolean exportContent(ExportContext context, DavResource resource) throws IOException { if (!canExport(context, resource)) { throw new IOException(getName() + ": Cannot export " + context.getExportRoot()); } return exportContent(context, resource.isCollection()); } /** * Checks if the given content node contains a jcr:data property * and spools its value to the output stream of the export context.<br> * Please note, that subclasses that define a different structure of the * content node should create their own * {@link #exportData(ExportContext, boolean, Node) exportData} method. * * @param context export context * @param isCollection <code>true</code> if collection * @param contentNode the content node * @throws IOException if an I/O error occurs */ protected void exportData(ExportContext context, boolean isCollection, Node contentNode) throws IOException, RepositoryException { if (contentNode.hasProperty(JcrConstants.JCR_DATA)) { Property p = contentNode.getProperty(JcrConstants.JCR_DATA); IOUtil.spool(p.getStream(), context.getOutputStream()); } // else: stream undefined -> content length was not set } /** * Retrieves mimetype, encoding and modification time from the content node. * The content length is determined by the length of the jcr:data property * if it is present. The creation time however is retrieved from the parent * node (in case of isCollection == false only). * * @param context the export context * @param isCollection <code>true</code> if collection * @param contentNode the content node * @throws java.io.IOException If an error occurs. 
*/ protected void exportProperties(ExportContext context, boolean isCollection, Node contentNode) throws IOException { try { // only non-collections: 'jcr:created' is present on the parent 'fileNode' only if (!isCollection && contentNode.getDepth() > 0 && contentNode.getParent().hasProperty(JcrConstants.JCR_CREATED)) { long cTime = contentNode.getParent().getProperty(JcrConstants.JCR_CREATED).getValue().getLong(); context.setCreationTime(cTime); } long length = IOUtil.UNDEFINED_LENGTH; if (contentNode.hasProperty(JcrConstants.JCR_DATA)) { Property p = contentNode.getProperty(JcrConstants.JCR_DATA); length = p.getLength(); context.setContentLength(length); } String mimeType = null; String encoding = null; if (contentNode.hasProperty(JcrConstants.JCR_MIMETYPE)) { mimeType = contentNode.getProperty(JcrConstants.JCR_MIMETYPE).getString(); } if (contentNode.hasProperty(JcrConstants.JCR_ENCODING)) { encoding = contentNode.getProperty(JcrConstants.JCR_ENCODING).getString(); // ignore "" encoding (although this is avoided during import) if ("".equals(encoding)) { encoding = null; } } context.setContentType(mimeType, encoding); long modTime = IOUtil.UNDEFINED_TIME; if (contentNode.hasProperty(JcrConstants.JCR_LASTMODIFIED)) { modTime = contentNode.getProperty(JcrConstants.JCR_LASTMODIFIED).getLong(); context.setModificationTime(modTime); } else { context.setModificationTime(System.currentTimeMillis()); } if (length > IOUtil.UNDEFINED_LENGTH && modTime > IOUtil.UNDEFINED_TIME) { String etag = "\"" + length + "-" + modTime + "\""; context.setETag(etag); } } catch (RepositoryException e) { // should never occur log.error("Unexpected error {} while exporting properties: {}", e.getClass().getName(), e.getMessage()); throw new IOException(e.getMessage()); } } /** * Retrieves the content node that contains the data to be exported. In case * isCollection is true, this corresponds to the export root. 
Otherwise there * must be a child node with name {@link JcrConstants#JCR_CONTENT jcr:content}. * * @param context the export context * @param isCollection <code>true</code> if collection * @return content node used for the export * @throws RepositoryException if an error during repository access occurs. */ protected Node getContentNode(ExportContext context, boolean isCollection) throws RepositoryException { Node contentNode = (Node)context.getExportRoot(); // 'file' nodes must have an jcr:content child node (see canExport) if (!isCollection) { contentNode = contentNode.getNode(JcrConstants.JCR_CONTENT); } return contentNode; } /** * Name of the nodetype to be used to create a new collection node (folder) * * @return nodetype name */ public String getCollectionNodeType() { return collectionNodetype; } /** * Name of the nodetype to be used to create a new non-collection node (file) * * @return nodetype name */ public String getNodeType() { return defaultNodetype; } /** * Name of the nodetype to be used to create the content node below * a new non-collection node, whose name is always {@link JcrConstants#JCR_CONTENT * jcr:content}. 
* * @return nodetype name */ public String getContentNodeType() { return contentNodetype; } //----------------------------------------------------< PropertyHandler >--- public boolean canExport(PropertyExportContext context, boolean isCollection) { return canExport((ExportContext) context, isCollection); } public boolean exportProperties(PropertyExportContext exportContext, boolean isCollection) throws RepositoryException { if (!canExport(exportContext, isCollection)) { throw new RepositoryException("PropertyHandler " + getName() + " failed to export properties."); } Node cn = getContentNode(exportContext, isCollection); try { // export the properties common with normal I/O handling exportProperties(exportContext, isCollection, cn); // export all other properties as well PropertyIterator it = cn.getProperties(); while (it.hasNext()) { Property p = it.nextProperty(); String name = p.getName(); PropertyDefinition def = p.getDefinition(); if (def.isMultiple() || isDefinedByFilteredNodeType(def)) { log.debug("Skip property '" + name + "': not added to webdav property set."); continue; } if (JcrConstants.JCR_DATA.equals(name) || JcrConstants.JCR_MIMETYPE.equals(name) || JcrConstants.JCR_ENCODING.equals(name) || JcrConstants.JCR_LASTMODIFIED.equals(name)) { continue; } DavPropertyName davName = getDavName(name, p.getSession()); exportContext.setProperty(davName, p.getValue().getString()); } return true; } catch (IOException e) { // should not occur (log output see 'exportProperties') return false; } } public boolean canImport(PropertyImportContext context, boolean isCollection) { if (context == null || context.isCompleted()) { return false; } Item contextItem = context.getImportRoot(); try { return contextItem != null && contextItem.isNode() && (isCollection || ((Node)contextItem).hasNode(JcrConstants.JCR_CONTENT)); } catch (RepositoryException e) { log.error("Unexpected error: " + e.getMessage()); return false; } } public Map<? 
extends PropEntry, ?> importProperties(PropertyImportContext importContext, boolean isCollection) throws RepositoryException { if (!canImport(importContext, isCollection)) { throw new RepositoryException("PropertyHandler " + getName() + " failed import properties"); } // loop over List and remember all properties and propertyNames // that failed to be imported (set or remove). Map<PropEntry, RepositoryException> failures = new HashMap<PropEntry, RepositoryException>(); List<? extends PropEntry> changeList = importContext.getChangeList(); // for collections the import-root is the target node where properties // are altered. in contrast 'non-collections' are with the handler // represented by 'file' nodes, that must have a jcr:content child // node, which holds all properties except jcr:created. // -> see canImport for the corresponding assertions Node cn = (Node) importContext.getImportRoot(); if (!isCollection && cn.hasNode(JcrConstants.JCR_CONTENT)) { cn = cn.getNode(JcrConstants.JCR_CONTENT); } if (changeList != null) { for (PropEntry propEntry : changeList) { try { if (propEntry instanceof DavPropertyName) { // remove DavPropertyName propName = (DavPropertyName) propEntry; removeJcrProperty(propName, cn); } else if (propEntry instanceof DavProperty) { // add or modify property DavProperty<?> prop = (DavProperty<?>) propEntry; setJcrProperty(prop, cn); } else { // ignore any other entry in the change list log.error("unknown object in change list: " + propEntry.getClass().getName()); } } catch (RepositoryException e) { failures.put(propEntry, e); } } } if (failures.isEmpty()) { setLastModified(cn, IOUtil.UNDEFINED_LENGTH); } return failures; } /** * Detects the media type of a document based on the given name. 
* * @param name document name * @return detected content type (or application/octet-stream) */ protected String detect(String name) { try { Metadata metadata = new Metadata(); metadata.set(TikaCoreProperties.RESOURCE_NAME_KEY, name); if (ioManager != null && ioManager.getDetector() != null) { return ioManager.getDetector().detect(null, metadata).toString(); } else { return "application/octet-stream"; } } catch (IOException e) { // Can not happen since the InputStream above is null throw new IllegalStateException( "Unexpected IOException", e); } } //----------------------------------------------------< CopyMoveHandler >--- /** * @see CopyMoveHandler#canCopy(CopyMoveContext, org.apache.jackrabbit.webdav.DavResource, org.apache.jackrabbit.webdav.DavResource) */ public boolean canCopy(CopyMoveContext context, DavResource source, DavResource destination) { return true; } /** * @see CopyMoveHandler#copy(CopyMoveContext, org.apache.jackrabbit.webdav.DavResource, org.apache.jackrabbit.webdav.DavResource) */ public boolean copy(CopyMoveContext context, DavResource source, DavResource destination) throws DavException { if (context.isShallowCopy() && source.isCollection()) { // TODO: currently no support for shallow copy; however this is // only relevant if the source resource is a collection, because // otherwise it doesn't make a difference throw new DavException(DavServletResponse.SC_FORBIDDEN, "Unable to perform shallow copy."); } try { context.getSession().getWorkspace().copy(source.getLocator().getRepositoryPath(), destination.getLocator().getRepositoryPath()); return true; } catch (PathNotFoundException e) { // according to rfc 2518: missing parent throw new DavException(DavServletResponse.SC_CONFLICT, e.getMessage()); } catch (RepositoryException e) { throw new JcrDavException(e); } } /** * @see CopyMoveHandler#canMove(CopyMoveContext, org.apache.jackrabbit.webdav.DavResource, org.apache.jackrabbit.webdav.DavResource) */ public boolean canMove(CopyMoveContext context, 
DavResource source, DavResource destination) { return true; } /** * @see CopyMoveHandler#move(CopyMoveContext, org.apache.jackrabbit.webdav.DavResource, org.apache.jackrabbit.webdav.DavResource) */ public boolean move(CopyMoveContext context, DavResource source, DavResource destination) throws DavException { try { context.getWorkspace().move(source.getLocator().getRepositoryPath(), destination.getLocator().getRepositoryPath()); return true; } catch (RepositoryException e) { throw new JcrDavException(e); } } //----------------------------------------------------< DeleteHandler >--- /** * @see DeleteHandler#canDelete(DeleteContext, DavResource) */ public boolean canDelete(DeleteContext deleteContext, DavResource member) { return true; } /** * @see DeleteHandler#delete(DeleteContext, DavResource) */ public boolean delete(DeleteContext deleteContext, DavResource member) throws DavException { try { String itemPath = member.getLocator().getRepositoryPath(); Item item = deleteContext.getSession().getItem(itemPath); if (item instanceof Node) { ((Node) item).removeShare(); } else { item.remove(); } deleteContext.getSession().save(); log.debug("default handler deleted {}", member.getResourcePath()); return true; } catch (RepositoryException e) { throw new JcrDavException(e); } } //------------------------------------------------------------< private >--- /** * Builds a webdav property name from the given jcrName. In case the jcrName * contains a namespace prefix that would conflict with any of the predefined * webdav namespaces a new prefix is assigned.<br> * Please note, that the local part of the jcrName is checked for XML * compatibility by calling {@link ISO9075#encode(String)} * * @param jcrName name of the jcr property * @param session session * @return a <code>DavPropertyName</code> for the given jcr name. * @throws RepositoryException if an error during repository access occurs. 
*/ private DavPropertyName getDavName(String jcrName, Session session) throws RepositoryException { // make sure the local name is xml compliant String localName = ISO9075.encode(Text.getLocalName(jcrName)); String prefix = Text.getNamespacePrefix(jcrName); String uri = session.getNamespaceURI(prefix); Namespace namespace = Namespace.getNamespace(prefix, uri); DavPropertyName name = DavPropertyName.create(localName, namespace); return name; } /** * Build jcr property name from dav property name. If the property name * defines a namespace uri, that has not been registered yet, an attempt * is made to register the uri with the prefix defined. * * @param propName name of the dav property * @param session repository session * @return jcr name * @throws RepositoryException if an error during repository access occurs. */ private String getJcrName(DavPropertyName propName, Session session) throws RepositoryException { // remove any encoding necessary for xml compliance String pName = ISO9075.decode(propName.getName()); Namespace propNamespace = propName.getNamespace(); if (!Namespace.EMPTY_NAMESPACE.equals(propNamespace)) { NamespaceHelper helper = new NamespaceHelper(session); String prefix = helper.registerNamespace( propNamespace.getPrefix(), propNamespace.getURI()); pName = prefix + ":" + pName; } return pName; } /** * @param property dav property * @param contentNode the content node * @throws RepositoryException if an error during repository access occurs. */ private void setJcrProperty(DavProperty<?> property, Node contentNode) throws RepositoryException { // Retrieve the property value. Note, that a 'null' value is replaced // by empty string, since setting a jcr property value to 'null' // would be equivalent to its removal. 
String value = ""; if (property.getValue() != null) { value = property.getValue().toString(); } DavPropertyName davName = property.getName(); if (DavPropertyName.GETCONTENTTYPE.equals(davName)) { String mimeType = IOUtil.getMimeType(value); String encoding = IOUtil.getEncoding(value); contentNode.setProperty(JcrConstants.JCR_MIMETYPE, mimeType); contentNode.setProperty(JcrConstants.JCR_ENCODING, encoding); } else { contentNode.setProperty(getJcrName(davName, contentNode.getSession()), value); } } /** * @param propertyName dav property name * @param contentNode the content node * @throws RepositoryException if an error during repository access occurs. */ private void removeJcrProperty(DavPropertyName propertyName, Node contentNode) throws RepositoryException { if (DavPropertyName.GETCONTENTTYPE.equals(propertyName)) { if (contentNode.hasProperty(JcrConstants.JCR_MIMETYPE)) { contentNode.getProperty(JcrConstants.JCR_MIMETYPE).remove(); } if (contentNode.hasProperty(JcrConstants.JCR_ENCODING)) { contentNode.getProperty(JcrConstants.JCR_ENCODING).remove(); } } else { String jcrName = getJcrName(propertyName, contentNode.getSession()); if (contentNode.hasProperty(jcrName)) { contentNode.getProperty(jcrName).remove(); } // removal of non existing property succeeds } } private void setLastModified(Node contentNode, long hint) { try { Calendar lastMod = Calendar.getInstance(); if (hint > IOUtil.UNDEFINED_TIME) { lastMod.setTimeInMillis(hint); } else { lastMod.setTime(new Date()); } contentNode.setProperty(JcrConstants.JCR_LASTMODIFIED, lastMod); } catch (RepositoryException e) { // ignore: property may not be available on the node. // deliberately not re-throwing as IOException. 
} } private static boolean isDefinedByFilteredNodeType(PropertyDefinition def) { String ntName = def.getDeclaringNodeType().getName(); return ntName.equals(JcrConstants.NT_BASE) || ntName.equals(JcrConstants.MIX_REFERENCEABLE) || ntName.equals(JcrConstants.MIX_VERSIONABLE) || ntName.equals(JcrConstants.MIX_LOCKABLE); } //-------------------------------------------< setter for configuration >--- public void setCollectionNodetype(String collectionNodetype) { this.collectionNodetype = collectionNodetype; } public void setDefaultNodetype(String defaultNodetype) { this.defaultNodetype = defaultNodetype; } public void setContentNodetype(String contentNodetype) { this.contentNodetype = contentNodetype; } }
googleapis/google-cloud-java
35,913
java-video-stitcher/proto-google-cloud-video-stitcher-v1/src/main/java/com/google/cloud/video/stitcher/v1/LiveAdTagDetail.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/video/stitcher/v1/ad_tag_details.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.video.stitcher.v1; /** * * * <pre> * Information related to the details for one ad tag. This resource is only * available for live sessions that do not implement Google Ad Manager ad * insertion. * </pre> * * Protobuf type {@code google.cloud.video.stitcher.v1.LiveAdTagDetail} */ public final class LiveAdTagDetail extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.video.stitcher.v1.LiveAdTagDetail) LiveAdTagDetailOrBuilder { private static final long serialVersionUID = 0L; // Use LiveAdTagDetail.newBuilder() to construct. 
private LiveAdTagDetail(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private LiveAdTagDetail() { name_ = ""; adRequests_ = java.util.Collections.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new LiveAdTagDetail(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.video.stitcher.v1.AdTagDetailsProto .internal_static_google_cloud_video_stitcher_v1_LiveAdTagDetail_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.video.stitcher.v1.AdTagDetailsProto .internal_static_google_cloud_video_stitcher_v1_LiveAdTagDetail_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.video.stitcher.v1.LiveAdTagDetail.class, com.google.cloud.video.stitcher.v1.LiveAdTagDetail.Builder.class); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * The resource name in the form of * `projects/{project}/locations/{location}/liveSessions/{live_session}/liveAdTagDetails/{id}`. * </pre> * * <code>string name = 1;</code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * The resource name in the form of * `projects/{project}/locations/{location}/liveSessions/{live_session}/liveAdTagDetails/{id}`. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int AD_REQUESTS_FIELD_NUMBER = 2; @SuppressWarnings("serial") private java.util.List<com.google.cloud.video.stitcher.v1.AdRequest> adRequests_; /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ @java.lang.Override public java.util.List<com.google.cloud.video.stitcher.v1.AdRequest> getAdRequestsList() { return adRequests_; } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ @java.lang.Override public java.util.List<? extends com.google.cloud.video.stitcher.v1.AdRequestOrBuilder> getAdRequestsOrBuilderList() { return adRequests_; } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ @java.lang.Override public int getAdRequestsCount() { return adRequests_.size(); } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ @java.lang.Override public com.google.cloud.video.stitcher.v1.AdRequest getAdRequests(int index) { return adRequests_.get(index); } /** * * * <pre> * A list of ad requests. 
* </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ @java.lang.Override public com.google.cloud.video.stitcher.v1.AdRequestOrBuilder getAdRequestsOrBuilder(int index) { return adRequests_.get(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } for (int i = 0; i < adRequests_.size(); i++) { output.writeMessage(2, adRequests_.get(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } for (int i = 0; i < adRequests_.size(); i++) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, adRequests_.get(i)); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.video.stitcher.v1.LiveAdTagDetail)) { return super.equals(obj); } com.google.cloud.video.stitcher.v1.LiveAdTagDetail other = (com.google.cloud.video.stitcher.v1.LiveAdTagDetail) obj; if (!getName().equals(other.getName())) return false; if (!getAdRequestsList().equals(other.getAdRequestsList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { 
return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); if (getAdRequestsCount() > 0) { hash = (37 * hash) + AD_REQUESTS_FIELD_NUMBER; hash = (53 * hash) + getAdRequestsList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.video.stitcher.v1.LiveAdTagDetail parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.video.stitcher.v1.LiveAdTagDetail parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.LiveAdTagDetail parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.video.stitcher.v1.LiveAdTagDetail parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.LiveAdTagDetail parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.video.stitcher.v1.LiveAdTagDetail parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.LiveAdTagDetail parseFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.video.stitcher.v1.LiveAdTagDetail parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.LiveAdTagDetail parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.video.stitcher.v1.LiveAdTagDetail parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.video.stitcher.v1.LiveAdTagDetail parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.video.stitcher.v1.LiveAdTagDetail parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.video.stitcher.v1.LiveAdTagDetail prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Information related to the details for one ad tag. This resource is only * available for live sessions that do not implement Google Ad Manager ad * insertion. * </pre> * * Protobuf type {@code google.cloud.video.stitcher.v1.LiveAdTagDetail} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.video.stitcher.v1.LiveAdTagDetail) com.google.cloud.video.stitcher.v1.LiveAdTagDetailOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.video.stitcher.v1.AdTagDetailsProto .internal_static_google_cloud_video_stitcher_v1_LiveAdTagDetail_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.video.stitcher.v1.AdTagDetailsProto .internal_static_google_cloud_video_stitcher_v1_LiveAdTagDetail_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.video.stitcher.v1.LiveAdTagDetail.class, com.google.cloud.video.stitcher.v1.LiveAdTagDetail.Builder.class); } // Construct using com.google.cloud.video.stitcher.v1.LiveAdTagDetail.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; name_ = ""; if (adRequestsBuilder_ == null) { adRequests_ = java.util.Collections.emptyList(); } else { adRequests_ = null; adRequestsBuilder_.clear(); } bitField0_ = (bitField0_ & ~0x00000002); return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return 
com.google.cloud.video.stitcher.v1.AdTagDetailsProto .internal_static_google_cloud_video_stitcher_v1_LiveAdTagDetail_descriptor; } @java.lang.Override public com.google.cloud.video.stitcher.v1.LiveAdTagDetail getDefaultInstanceForType() { return com.google.cloud.video.stitcher.v1.LiveAdTagDetail.getDefaultInstance(); } @java.lang.Override public com.google.cloud.video.stitcher.v1.LiveAdTagDetail build() { com.google.cloud.video.stitcher.v1.LiveAdTagDetail result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.video.stitcher.v1.LiveAdTagDetail buildPartial() { com.google.cloud.video.stitcher.v1.LiveAdTagDetail result = new com.google.cloud.video.stitcher.v1.LiveAdTagDetail(this); buildPartialRepeatedFields(result); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartialRepeatedFields( com.google.cloud.video.stitcher.v1.LiveAdTagDetail result) { if (adRequestsBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0)) { adRequests_ = java.util.Collections.unmodifiableList(adRequests_); bitField0_ = (bitField0_ & ~0x00000002); } result.adRequests_ = adRequests_; } else { result.adRequests_ = adRequestsBuilder_.build(); } } private void buildPartial0(com.google.cloud.video.stitcher.v1.LiveAdTagDetail result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.name_ = name_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } 
@java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.video.stitcher.v1.LiveAdTagDetail) { return mergeFrom((com.google.cloud.video.stitcher.v1.LiveAdTagDetail) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.video.stitcher.v1.LiveAdTagDetail other) { if (other == com.google.cloud.video.stitcher.v1.LiveAdTagDetail.getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000001; onChanged(); } if (adRequestsBuilder_ == null) { if (!other.adRequests_.isEmpty()) { if (adRequests_.isEmpty()) { adRequests_ = other.adRequests_; bitField0_ = (bitField0_ & ~0x00000002); } else { ensureAdRequestsIsMutable(); adRequests_.addAll(other.adRequests_); } onChanged(); } } else { if (!other.adRequests_.isEmpty()) { if (adRequestsBuilder_.isEmpty()) { adRequestsBuilder_.dispose(); adRequestsBuilder_ = null; adRequests_ = other.adRequests_; bitField0_ = (bitField0_ & ~0x00000002); adRequestsBuilder_ = com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders ? 
getAdRequestsFieldBuilder() : null; } else { adRequestsBuilder_.addAllMessages(other.adRequests_); } } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { com.google.cloud.video.stitcher.v1.AdRequest m = input.readMessage( com.google.cloud.video.stitcher.v1.AdRequest.parser(), extensionRegistry); if (adRequestsBuilder_ == null) { ensureAdRequestsIsMutable(); adRequests_.add(m); } else { adRequestsBuilder_.addMessage(m); } break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object name_ = ""; /** * * * <pre> * The resource name in the form of * `projects/{project}/locations/{location}/liveSessions/{live_session}/liveAdTagDetails/{id}`. * </pre> * * <code>string name = 1;</code> * * @return The name. 
*/ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The resource name in the form of * `projects/{project}/locations/{location}/liveSessions/{live_session}/liveAdTagDetails/{id}`. * </pre> * * <code>string name = 1;</code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The resource name in the form of * `projects/{project}/locations/{location}/liveSessions/{live_session}/liveAdTagDetails/{id}`. * </pre> * * <code>string name = 1;</code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The resource name in the form of * `projects/{project}/locations/{location}/liveSessions/{live_session}/liveAdTagDetails/{id}`. * </pre> * * <code>string name = 1;</code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The resource name in the form of * `projects/{project}/locations/{location}/liveSessions/{live_session}/liveAdTagDetails/{id}`. * </pre> * * <code>string name = 1;</code> * * @param value The bytes for name to set. * @return This builder for chaining. 
*/ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.util.List<com.google.cloud.video.stitcher.v1.AdRequest> adRequests_ = java.util.Collections.emptyList(); private void ensureAdRequestsIsMutable() { if (!((bitField0_ & 0x00000002) != 0)) { adRequests_ = new java.util.ArrayList<com.google.cloud.video.stitcher.v1.AdRequest>(adRequests_); bitField0_ |= 0x00000002; } } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.video.stitcher.v1.AdRequest, com.google.cloud.video.stitcher.v1.AdRequest.Builder, com.google.cloud.video.stitcher.v1.AdRequestOrBuilder> adRequestsBuilder_; /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public java.util.List<com.google.cloud.video.stitcher.v1.AdRequest> getAdRequestsList() { if (adRequestsBuilder_ == null) { return java.util.Collections.unmodifiableList(adRequests_); } else { return adRequestsBuilder_.getMessageList(); } } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public int getAdRequestsCount() { if (adRequestsBuilder_ == null) { return adRequests_.size(); } else { return adRequestsBuilder_.getCount(); } } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public com.google.cloud.video.stitcher.v1.AdRequest getAdRequests(int index) { if (adRequestsBuilder_ == null) { return adRequests_.get(index); } else { return adRequestsBuilder_.getMessage(index); } } /** * * * <pre> * A list of ad requests. 
* </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public Builder setAdRequests(int index, com.google.cloud.video.stitcher.v1.AdRequest value) { if (adRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAdRequestsIsMutable(); adRequests_.set(index, value); onChanged(); } else { adRequestsBuilder_.setMessage(index, value); } return this; } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public Builder setAdRequests( int index, com.google.cloud.video.stitcher.v1.AdRequest.Builder builderForValue) { if (adRequestsBuilder_ == null) { ensureAdRequestsIsMutable(); adRequests_.set(index, builderForValue.build()); onChanged(); } else { adRequestsBuilder_.setMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public Builder addAdRequests(com.google.cloud.video.stitcher.v1.AdRequest value) { if (adRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAdRequestsIsMutable(); adRequests_.add(value); onChanged(); } else { adRequestsBuilder_.addMessage(value); } return this; } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public Builder addAdRequests(int index, com.google.cloud.video.stitcher.v1.AdRequest value) { if (adRequestsBuilder_ == null) { if (value == null) { throw new NullPointerException(); } ensureAdRequestsIsMutable(); adRequests_.add(index, value); onChanged(); } else { adRequestsBuilder_.addMessage(index, value); } return this; } /** * * * <pre> * A list of ad requests. 
* </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public Builder addAdRequests( com.google.cloud.video.stitcher.v1.AdRequest.Builder builderForValue) { if (adRequestsBuilder_ == null) { ensureAdRequestsIsMutable(); adRequests_.add(builderForValue.build()); onChanged(); } else { adRequestsBuilder_.addMessage(builderForValue.build()); } return this; } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public Builder addAdRequests( int index, com.google.cloud.video.stitcher.v1.AdRequest.Builder builderForValue) { if (adRequestsBuilder_ == null) { ensureAdRequestsIsMutable(); adRequests_.add(index, builderForValue.build()); onChanged(); } else { adRequestsBuilder_.addMessage(index, builderForValue.build()); } return this; } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public Builder addAllAdRequests( java.lang.Iterable<? extends com.google.cloud.video.stitcher.v1.AdRequest> values) { if (adRequestsBuilder_ == null) { ensureAdRequestsIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, adRequests_); onChanged(); } else { adRequestsBuilder_.addAllMessages(values); } return this; } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public Builder clearAdRequests() { if (adRequestsBuilder_ == null) { adRequests_ = java.util.Collections.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); } else { adRequestsBuilder_.clear(); } return this; } /** * * * <pre> * A list of ad requests. 
* </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public Builder removeAdRequests(int index) { if (adRequestsBuilder_ == null) { ensureAdRequestsIsMutable(); adRequests_.remove(index); onChanged(); } else { adRequestsBuilder_.remove(index); } return this; } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public com.google.cloud.video.stitcher.v1.AdRequest.Builder getAdRequestsBuilder(int index) { return getAdRequestsFieldBuilder().getBuilder(index); } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public com.google.cloud.video.stitcher.v1.AdRequestOrBuilder getAdRequestsOrBuilder(int index) { if (adRequestsBuilder_ == null) { return adRequests_.get(index); } else { return adRequestsBuilder_.getMessageOrBuilder(index); } } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public java.util.List<? extends com.google.cloud.video.stitcher.v1.AdRequestOrBuilder> getAdRequestsOrBuilderList() { if (adRequestsBuilder_ != null) { return adRequestsBuilder_.getMessageOrBuilderList(); } else { return java.util.Collections.unmodifiableList(adRequests_); } } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public com.google.cloud.video.stitcher.v1.AdRequest.Builder addAdRequestsBuilder() { return getAdRequestsFieldBuilder() .addBuilder(com.google.cloud.video.stitcher.v1.AdRequest.getDefaultInstance()); } /** * * * <pre> * A list of ad requests. 
* </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public com.google.cloud.video.stitcher.v1.AdRequest.Builder addAdRequestsBuilder(int index) { return getAdRequestsFieldBuilder() .addBuilder(index, com.google.cloud.video.stitcher.v1.AdRequest.getDefaultInstance()); } /** * * * <pre> * A list of ad requests. * </pre> * * <code>repeated .google.cloud.video.stitcher.v1.AdRequest ad_requests = 2;</code> */ public java.util.List<com.google.cloud.video.stitcher.v1.AdRequest.Builder> getAdRequestsBuilderList() { return getAdRequestsFieldBuilder().getBuilderList(); } private com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.video.stitcher.v1.AdRequest, com.google.cloud.video.stitcher.v1.AdRequest.Builder, com.google.cloud.video.stitcher.v1.AdRequestOrBuilder> getAdRequestsFieldBuilder() { if (adRequestsBuilder_ == null) { adRequestsBuilder_ = new com.google.protobuf.RepeatedFieldBuilderV3< com.google.cloud.video.stitcher.v1.AdRequest, com.google.cloud.video.stitcher.v1.AdRequest.Builder, com.google.cloud.video.stitcher.v1.AdRequestOrBuilder>( adRequests_, ((bitField0_ & 0x00000002) != 0), getParentForChildren(), isClean()); adRequests_ = null; } return adRequestsBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.video.stitcher.v1.LiveAdTagDetail) } // @@protoc_insertion_point(class_scope:google.cloud.video.stitcher.v1.LiveAdTagDetail) private static final com.google.cloud.video.stitcher.v1.LiveAdTagDetail DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.video.stitcher.v1.LiveAdTagDetail(); } public static 
com.google.cloud.video.stitcher.v1.LiveAdTagDetail getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<LiveAdTagDetail> PARSER = new com.google.protobuf.AbstractParser<LiveAdTagDetail>() { @java.lang.Override public LiveAdTagDetail parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<LiveAdTagDetail> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<LiveAdTagDetail> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.video.stitcher.v1.LiveAdTagDetail getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/hadoop
36,101
hadoop-hdfs-project/hadoop-hdfs/src/main/java/org/apache/hadoop/hdfs/server/namenode/FSDirWriteFileOp.java
/** * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hadoop.hdfs.server.namenode; import org.apache.hadoop.util.Preconditions; import org.apache.hadoop.HadoopIllegalArgumentException; import org.apache.hadoop.fs.XAttrSetFlag; import org.apache.hadoop.hdfs.AddBlockFlag; import org.apache.hadoop.fs.CreateFlag; import org.apache.hadoop.fs.FileAlreadyExistsException; import org.apache.hadoop.fs.FileEncryptionInfo; import org.apache.hadoop.fs.XAttr; import org.apache.hadoop.fs.permission.AclEntry; import org.apache.hadoop.fs.permission.FsAction; import org.apache.hadoop.fs.permission.PermissionStatus; import org.apache.hadoop.hdfs.DFSConfigKeys; import org.apache.hadoop.hdfs.DFSUtil; import org.apache.hadoop.hdfs.protocol.Block; import org.apache.hadoop.hdfs.protocol.BlockStoragePolicy; import org.apache.hadoop.hdfs.protocol.ClientProtocol; import org.apache.hadoop.hdfs.protocol.DatanodeInfo; import org.apache.hadoop.hdfs.protocol.ErasureCodingPolicy; import org.apache.hadoop.hdfs.protocol.ExtendedBlock; import org.apache.hadoop.hdfs.protocol.FSLimitException; import org.apache.hadoop.hdfs.protocol.HdfsFileStatus; import org.apache.hadoop.hdfs.protocol.LocatedBlock; import org.apache.hadoop.hdfs.protocol.QuotaExceededException; import 
org.apache.hadoop.hdfs.security.token.block.BlockTokenIdentifier; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfo; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoContiguous; import org.apache.hadoop.hdfs.server.blockmanagement.BlockInfoStriped; import org.apache.hadoop.hdfs.server.blockmanagement.BlockManager; import org.apache.hadoop.hdfs.protocol.BlockType; import org.apache.hadoop.hdfs.server.blockmanagement.BlockUnderConstructionFeature; import org.apache.hadoop.hdfs.server.blockmanagement.DatanodeStorageInfo; import org.apache.hadoop.hdfs.server.common.HdfsServerConstants; import org.apache.hadoop.hdfs.server.namenode.FSDirectory.DirOp; import org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot; import org.apache.hadoop.hdfs.util.RwLockMode; import org.apache.hadoop.io.erasurecode.ErasureCodeConstants; import org.apache.hadoop.net.Node; import org.apache.hadoop.net.NodeBase; import org.apache.hadoop.util.ChunkedArrayList; import java.io.FileNotFoundException; import java.io.IOException; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.EnumSet; import java.util.HashSet; import java.util.List; import java.util.Set; import static org.apache.hadoop.hdfs.server.namenode.snapshot.Snapshot.CURRENT_STATE_ID; import static org.apache.hadoop.util.Time.now; class FSDirWriteFileOp { private FSDirWriteFileOp() {} static boolean unprotectedRemoveBlock( FSDirectory fsd, String path, INodesInPath iip, INodeFile fileNode, Block block) throws IOException { // modify file-> block and blocksMap // fileNode should be under construction BlockInfo uc = fileNode.removeLastBlock(block); if (uc == null) { return false; } if (uc.getUnderConstructionFeature() != null) { DatanodeStorageInfo.decrementBlocksScheduled(uc .getUnderConstructionFeature().getExpectedStorageLocations()); } fsd.getBlockManager().removeBlockFromMap(uc); if(NameNode.stateChangeLog.isDebugEnabled()) { 
NameNode.stateChangeLog.debug("DIR* FSDirectory.removeBlock: " +path+" with "+block +" block is removed from the file system"); } // update space consumed fsd.updateCount(iip, 0, -fileNode.getPreferredBlockSize(), fileNode.getPreferredBlockReplication(), true); return true; } /** * Persist the block list for the inode. */ static void persistBlocks( FSDirectory fsd, String path, INodeFile file, boolean logRetryCache) { assert fsd.getFSNamesystem().hasWriteLock(RwLockMode.FS); Preconditions.checkArgument(file.isUnderConstruction()); fsd.getEditLog().logUpdateBlocks(path, file, logRetryCache); if(NameNode.stateChangeLog.isDebugEnabled()) { NameNode.stateChangeLog.debug("persistBlocks: " + path + " with " + file.getBlocks().length + " blocks is persisted to" + " the file system"); } } static void abandonBlock( FSDirectory fsd, FSPermissionChecker pc, ExtendedBlock b, long fileId, String src, String holder) throws IOException { final INodesInPath iip = fsd.resolvePath(pc, src, fileId); src = iip.getPath(); FSNamesystem fsn = fsd.getFSNamesystem(); final INodeFile file = fsn.checkLease(iip, holder, fileId); Preconditions.checkState(file.isUnderConstruction()); if (file.getBlockType() == BlockType.STRIPED) { return; // do not abandon block for striped file } Block localBlock = ExtendedBlock.getLocalBlock(b); fsd.writeLock(); try { // Remove the block from the pending creates list if (!unprotectedRemoveBlock(fsd, src, iip, file, localBlock)) { return; } } finally { fsd.writeUnlock(); } persistBlocks(fsd, src, file, false); } static void checkBlock(FSNamesystem fsn, ExtendedBlock block) throws IOException { String bpId = fsn.getBlockPoolId(); if (block != null && !bpId.equals(block.getBlockPoolId())) { throw new IOException("Unexpected BlockPoolId " + block.getBlockPoolId() + " - expected " + bpId); } } /** * Part I of getAdditionalBlock(). 
* Analyze the state of the file under read lock to determine if the client * can add a new block, detect potential retries, lease mismatches, * and minimal replication of the penultimate block. * * Generate target DataNode locations for the new block, * but do not create the new block yet. */ static ValidateAddBlockResult validateAddBlock( FSNamesystem fsn, FSPermissionChecker pc, String src, long fileId, String clientName, ExtendedBlock previous, LocatedBlock[] onRetryBlock) throws IOException { final long blockSize; final short numTargets; final byte storagePolicyID; String clientMachine; final BlockType blockType; INodesInPath iip = fsn.dir.resolvePath(pc, src, fileId); FileState fileState = analyzeFileState(fsn, iip, fileId, clientName, previous, onRetryBlock); if (onRetryBlock[0] != null && onRetryBlock[0].getLocations().length > 0) { // This is a retry. No need to generate new locations. // Use the last block if it has locations. return null; } final INodeFile pendingFile = fileState.inode; if (!fsn.checkFileProgress(src, pendingFile, false)) { throw new NotReplicatedYetException("Not replicated yet: " + src); } if (pendingFile.getBlocks().length >= fsn.maxBlocksPerFile) { throw new IOException("File has reached the limit on maximum number of" + " blocks (" + DFSConfigKeys.DFS_NAMENODE_MAX_BLOCKS_PER_FILE_KEY + "): " + pendingFile.getBlocks().length + " >= " + fsn.maxBlocksPerFile); } blockSize = pendingFile.getPreferredBlockSize(); clientMachine = pendingFile.getFileUnderConstructionFeature() .getClientMachine(); blockType = pendingFile.getBlockType(); ErasureCodingPolicy ecPolicy = null; if (blockType == BlockType.STRIPED) { ecPolicy = FSDirErasureCodingOp.unprotectedGetErasureCodingPolicy(fsn, iip); numTargets = (short) (ecPolicy.getSchema().getNumDataUnits() + ecPolicy.getSchema().getNumParityUnits()); } else { numTargets = pendingFile.getFileReplication(); } storagePolicyID = pendingFile.getStoragePolicyID(); return new ValidateAddBlockResult(blockSize, 
numTargets, storagePolicyID, clientMachine, blockType, ecPolicy); } static LocatedBlock makeLocatedBlock(FSNamesystem fsn, BlockInfo blk, DatanodeStorageInfo[] locs, long offset) throws IOException { LocatedBlock lBlk = BlockManager.newLocatedBlock( fsn.getExtendedBlock(new Block(blk)), blk, locs, offset); fsn.getBlockManager().setBlockToken(lBlk, BlockTokenIdentifier.AccessMode.WRITE); return lBlk; } /** * Part II of getAdditionalBlock(). * Should repeat the same analysis of the file state as in Part 1, * but under the write lock. * If the conditions still hold, then allocate a new block with * the new targets, add it to the INode and to the BlocksMap. */ static LocatedBlock storeAllocatedBlock(FSNamesystem fsn, String src, long fileId, String clientName, ExtendedBlock previous, DatanodeStorageInfo[] targets) throws IOException { long offset; // Run the full analysis again, since things could have changed // while chooseTarget() was executing. LocatedBlock[] onRetryBlock = new LocatedBlock[1]; INodesInPath iip = fsn.dir.resolvePath(null, src, fileId); FileState fileState = analyzeFileState(fsn, iip, fileId, clientName, previous, onRetryBlock); final INodeFile pendingFile = fileState.inode; src = fileState.path; if (onRetryBlock[0] != null) { if (onRetryBlock[0].getLocations().length > 0) { // This is a retry. Just return the last block if having locations. 
return onRetryBlock[0]; } else { // add new chosen targets to already allocated block and return BlockInfo lastBlockInFile = pendingFile.getLastBlock(); lastBlockInFile.getUnderConstructionFeature().setExpectedLocations( lastBlockInFile, targets, pendingFile.getBlockType()); offset = pendingFile.computeFileSize(); return makeLocatedBlock(fsn, lastBlockInFile, targets, offset); } } // commit the last block and complete it if it has minimum replicas fsn.commitOrCompleteLastBlock(pendingFile, fileState.iip, ExtendedBlock.getLocalBlock(previous)); final BlockType blockType = pendingFile.getBlockType(); // allocate new block, record block locations in INode. Block newBlock = fsn.createNewBlock(blockType); INodesInPath inodesInPath = INodesInPath.fromINode(pendingFile); saveAllocatedBlock(fsn, src, inodesInPath, newBlock, targets, blockType); persistNewBlock(fsn, src, pendingFile); offset = pendingFile.computeFileSize(); // Return located block return makeLocatedBlock(fsn, fsn.getStoredBlock(newBlock), targets, offset); } static DatanodeStorageInfo[] chooseTargetForNewBlock( BlockManager bm, String src, DatanodeInfo[] excludedNodes, String[] favoredNodes, EnumSet<AddBlockFlag> flags, ValidateAddBlockResult r) throws IOException { Node clientNode = null; boolean ignoreClientLocality = (flags != null && flags.contains(AddBlockFlag.IGNORE_CLIENT_LOCALITY)); // If client locality is ignored, clientNode remains 'null' to indicate if (!ignoreClientLocality) { clientNode = bm.getDatanodeManager().getDatanodeByHost(r.clientMachine); if (clientNode == null) { clientNode = getClientNode(bm, r.clientMachine); } } Set<Node> excludedNodesSet = (excludedNodes == null) ? new HashSet<>() : new HashSet<>(Arrays.asList(excludedNodes)); List<String> favoredNodesList = (favoredNodes == null) ? Collections.emptyList() : Arrays.asList(favoredNodes); // choose targets for the new block to be allocated. 
return bm.chooseTarget4NewBlock(src, r.numTargets, clientNode, excludedNodesSet, r.blockSize, favoredNodesList, r.storagePolicyID, r.blockType, r.ecPolicy, flags); } /** * Resolve clientmachine address to get a network location path */ static Node getClientNode(BlockManager bm, String clientMachine) { List<String> hosts = new ArrayList<>(1); hosts.add(clientMachine); List<String> rName = bm.getDatanodeManager() .resolveNetworkLocation(hosts); Node clientNode = null; if (rName != null) { // Able to resolve clientMachine mapping. // Create a temp node to findout the rack local nodes clientNode = new NodeBase(rName.get(0) + NodeBase.PATH_SEPARATOR_STR + clientMachine); } return clientNode; } static INodesInPath resolvePathForStartFile(FSDirectory dir, FSPermissionChecker pc, String src, EnumSet<CreateFlag> flag, boolean createParent) throws IOException { INodesInPath iip = dir.resolvePath(pc, src, DirOp.CREATE); if (dir.isPermissionEnabled()) { dir.checkAncestorAccess(pc, iip, FsAction.WRITE); } INode inode = iip.getLastINode(); if (inode != null) { // Verify that the destination does not exist as a directory already. if (inode.isDirectory()) { throw new FileAlreadyExistsException(iip.getPath() + " already exists as a directory"); } // Verifies it's indeed a file and perms allow overwrite INodeFile.valueOf(inode, src); if (dir.isPermissionEnabled() && flag.contains(CreateFlag.OVERWRITE)) { dir.checkPathAccess(pc, iip, FsAction.WRITE); } } else { if (!createParent) { dir.verifyParentDir(iip); } if (!flag.contains(CreateFlag.CREATE)) { throw new FileNotFoundException("Can't overwrite non-existent " + src); } } return iip; } /** * Create a new file or overwrite an existing file<br> * * Once the file is create the client then allocates a new block with the next * call using {@link ClientProtocol#addBlock}. 
* <p> * For description of parameters and exceptions thrown see * {@link ClientProtocol#create} */ static HdfsFileStatus startFile( FSNamesystem fsn, INodesInPath iip, PermissionStatus permissions, String holder, String clientMachine, EnumSet<CreateFlag> flag, boolean createParent, short replication, long blockSize, FileEncryptionInfo feInfo, INode.BlocksMapUpdateInfo toRemoveBlocks, boolean shouldReplicate, String ecPolicyName, String storagePolicy, boolean logRetryEntry) throws IOException { assert fsn.hasWriteLock(RwLockMode.FS); boolean overwrite = flag.contains(CreateFlag.OVERWRITE); boolean isLazyPersist = flag.contains(CreateFlag.LAZY_PERSIST); final String src = iip.getPath(); FSDirectory fsd = fsn.getFSDirectory(); if (iip.getLastINode() != null) { fsn.writeLock(RwLockMode.BM); try { if (overwrite) { List<INode> toRemoveINodes = new ChunkedArrayList<>(); List<Long> toRemoveUCFiles = new ChunkedArrayList<>(); long ret = FSDirDeleteOp.delete(fsd, iip, toRemoveBlocks, toRemoveINodes, toRemoveUCFiles, now()); if (ret >= 0) { iip = INodesInPath.replace(iip, iip.length() - 1, null); FSDirDeleteOp.incrDeletedFileCount(ret); fsn.removeLeasesAndINodes(toRemoveUCFiles, toRemoveINodes, true); } } else { // If lease soft limit time is expired, recover the lease fsn.recoverLeaseInternal(FSNamesystem.RecoverLeaseOp.CREATE_FILE, iip, src, holder, clientMachine, false); throw new FileAlreadyExistsException(src + " for client " + clientMachine + " already exists"); } } finally { fsn.writeUnlock(RwLockMode.BM, "create"); } } fsn.checkFsObjectLimit(); INodeFile newNode = null; INodesInPath parent = FSDirMkdirOp.createAncestorDirectories(fsd, iip, permissions); if (parent != null) { iip = addFile(fsd, parent, iip.getLastLocalName(), permissions, replication, blockSize, holder, clientMachine, shouldReplicate, ecPolicyName, storagePolicy); newNode = iip != null ? 
iip.getLastINode().asFile() : null; } if (newNode == null) { throw new IOException("Unable to add " + src + " to namespace"); } fsn.leaseManager.addLease( newNode.getFileUnderConstructionFeature().getClientName(), newNode.getId()); if (feInfo != null) { FSDirEncryptionZoneOp.setFileEncryptionInfo(fsd, iip, feInfo, XAttrSetFlag.CREATE); } setNewINodeStoragePolicy(fsd.getBlockManager(), iip, isLazyPersist); fsd.getEditLog().logOpenFile(src, newNode, overwrite, logRetryEntry); if (NameNode.stateChangeLog.isDebugEnabled()) { NameNode.stateChangeLog.debug("DIR* NameSystem.startFile: added " + src + " inode " + newNode.getId() + " " + holder); } return FSDirStatAndListingOp.getFileInfo(fsd, iip, false, false); } static INodeFile addFileForEditLog( FSDirectory fsd, long id, INodesInPath existing, byte[] localName, PermissionStatus permissions, List<AclEntry> aclEntries, List<XAttr> xAttrs, short replication, long modificationTime, long atime, long preferredBlockSize, boolean underConstruction, String clientName, String clientMachine, byte storagePolicyId, byte ecPolicyID) { final INodeFile newNode; Preconditions.checkNotNull(existing); assert fsd.hasWriteLock(); try { // check if the file has an EC policy boolean isStriped = ecPolicyID != ErasureCodeConstants.REPLICATION_POLICY_ID; ErasureCodingPolicy ecPolicy = null; if (isStriped) { ecPolicy = fsd.getFSNamesystem().getErasureCodingPolicyManager() .getByID(ecPolicyID); if (ecPolicy == null) { throw new IOException(String.format( "Cannot find erasure coding policy for new file %s/%s, " + "ecPolicyID=%d", existing.getPath(), Arrays.toString(localName), ecPolicyID)); } } final BlockType blockType = isStriped ? BlockType.STRIPED : BlockType.CONTIGUOUS; final Short replicationFactor = (!isStriped ? 
replication : null); if (underConstruction) { newNode = newINodeFile(id, permissions, modificationTime, modificationTime, replicationFactor, ecPolicyID, preferredBlockSize, storagePolicyId, blockType); newNode.toUnderConstruction(clientName, clientMachine); } else { newNode = newINodeFile(id, permissions, modificationTime, atime, replicationFactor, ecPolicyID, preferredBlockSize, storagePolicyId, blockType); } newNode.setLocalName(localName); INodesInPath iip = fsd.addINode(existing, newNode, permissions.getPermission()); if (iip != null) { if (aclEntries != null) { AclStorage.updateINodeAcl(newNode, aclEntries, CURRENT_STATE_ID); } if (xAttrs != null) { XAttrStorage.updateINodeXAttrs(newNode, xAttrs, CURRENT_STATE_ID); } return newNode; } } catch (IOException e) { NameNode.stateChangeLog.warn( "DIR* FSDirectory.unprotectedAddFile: exception when add " + existing .getPath() + " to the file system", e); if (e instanceof FSLimitException.MaxDirectoryItemsExceededException) { NameNode.stateChangeLog.warn("Please increase " + "dfs.namenode.fs-limits.max-directory-items and make it " + "consistent across all NameNodes."); } } return null; } /** * Add a block to the file. Returns a reference to the added block. 
*/ private static BlockInfo addBlock(FSDirectory fsd, String path, INodesInPath inodesInPath, Block block, DatanodeStorageInfo[] targets, BlockType blockType) throws IOException { fsd.writeLock(); try { final INodeFile fileINode = inodesInPath.getLastINode().asFile(); Preconditions.checkState(fileINode.isUnderConstruction()); // associate new last block for the file final BlockInfo blockInfo; if (blockType == BlockType.STRIPED) { ErasureCodingPolicy ecPolicy = FSDirErasureCodingOp.unprotectedGetErasureCodingPolicy( fsd.getFSNamesystem(), inodesInPath); short numDataUnits = (short) ecPolicy.getNumDataUnits(); short numParityUnits = (short) ecPolicy.getNumParityUnits(); short numLocations = (short) (numDataUnits + numParityUnits); // check quota limits and updated space consumed fsd.updateCount(inodesInPath, 0, fileINode.getPreferredBlockSize(), numLocations, true); blockInfo = new BlockInfoStriped(block, ecPolicy); blockInfo.convertToBlockUnderConstruction( HdfsServerConstants.BlockUCState.UNDER_CONSTRUCTION, targets); } else { // check quota limits and updated space consumed fsd.updateCount(inodesInPath, 0, fileINode.getPreferredBlockSize(), fileINode.getFileReplication(), true); short numLocations = fileINode.getFileReplication(); blockInfo = new BlockInfoContiguous(block, numLocations); blockInfo.convertToBlockUnderConstruction( HdfsServerConstants.BlockUCState.UNDER_CONSTRUCTION, targets); } fsd.getBlockManager().addBlockCollection(blockInfo, fileINode); fileINode.addBlock(blockInfo); if(NameNode.stateChangeLog.isDebugEnabled()) { NameNode.stateChangeLog.debug("DIR* FSDirectory.addBlock: " + path + " with " + block + " block is added to the in-memory " + "file system"); } return blockInfo; } finally { fsd.writeUnlock(); } } /** * Add the given filename to the fs. 
* @return the new INodesInPath instance that contains the new INode */ private static INodesInPath addFile( FSDirectory fsd, INodesInPath existing, byte[] localName, PermissionStatus permissions, short replication, long preferredBlockSize, String clientName, String clientMachine, boolean shouldReplicate, String ecPolicyName, String storagePolicy) throws IOException { Preconditions.checkNotNull(existing); long modTime = now(); INodesInPath newiip; fsd.writeLock(); try { boolean isStriped = false; ErasureCodingPolicy ecPolicy = null; byte storagepolicyid = 0; if (storagePolicy != null && !storagePolicy.isEmpty()) { BlockStoragePolicy policy = fsd.getBlockManager().getStoragePolicy(storagePolicy); if (policy == null) { throw new HadoopIllegalArgumentException( "Cannot find a block policy with the name " + storagePolicy); } storagepolicyid = policy.getId(); } if (!shouldReplicate) { ecPolicy = FSDirErasureCodingOp.getErasureCodingPolicy( fsd.getFSNamesystem(), ecPolicyName, existing); if (ecPolicy != null && (!ecPolicy.isReplicationPolicy())) { isStriped = true; } } final BlockType blockType = isStriped ? BlockType.STRIPED : BlockType.CONTIGUOUS; final Short replicationFactor = (!isStriped ? replication : null); final Byte ecPolicyID = (isStriped ? 
ecPolicy.getId() : null); INodeFile newNode = newINodeFile(fsd.allocateNewInodeId(), permissions, modTime, modTime, replicationFactor, ecPolicyID, preferredBlockSize, storagepolicyid, blockType); newNode.setLocalName(localName); newNode.toUnderConstruction(clientName, clientMachine); newiip = fsd.addINode(existing, newNode, permissions.getPermission()); } finally { fsd.writeUnlock(); } if (newiip == null) { NameNode.stateChangeLog.info("DIR* addFile: failed to add " + existing.getPath() + "/" + DFSUtil.bytes2String(localName)); return null; } if(NameNode.stateChangeLog.isDebugEnabled()) { NameNode.stateChangeLog.debug("DIR* addFile: " + DFSUtil.bytes2String(localName) + " is added"); } return newiip; } private static FileState analyzeFileState( FSNamesystem fsn, INodesInPath iip, long fileId, String clientName, ExtendedBlock previous, LocatedBlock[] onRetryBlock) throws IOException { assert fsn.hasReadLock(RwLockMode.GLOBAL); String src = iip.getPath(); checkBlock(fsn, previous); onRetryBlock[0] = null; fsn.checkNameNodeSafeMode("Cannot add block to " + src); // have we exceeded the configured limit of fs objects. fsn.checkFsObjectLimit(); Block previousBlock = ExtendedBlock.getLocalBlock(previous); final INodeFile file = fsn.checkLease(iip, clientName, fileId); BlockInfo lastBlockInFile = file.getLastBlock(); if (!Block.matchingIdAndGenStamp(previousBlock, lastBlockInFile)) { // The block that the client claims is the current last block // doesn't match up with what we think is the last block. There are // four possibilities: // 1) This is the first block allocation of an append() pipeline // which started appending exactly at or exceeding the block boundary. // In this case, the client isn't passed the previous block, // so it makes the allocateBlock() call with previous=null. // We can distinguish this since the last block of the file // will be exactly a full block. 
// 2) This is a retry from a client that missed the response of a // prior getAdditionalBlock() call, perhaps because of a network // timeout, or because of an HA failover. In that case, we know // by the fact that the client is re-issuing the RPC that it // never began to write to the old block. Hence it is safe to // return the existing block. // 3) This is an entirely bogus request/bug -- we should error out // rather than potentially appending a new block with an empty // one in the middle, etc // 4) This is a retry from a client that timed out while // the prior getAdditionalBlock() is still being processed, // currently working on chooseTarget(). // There are no means to distinguish between the first and // the second attempts in Part I, because the first one hasn't // changed the namesystem state yet. // We run this analysis again in Part II where case 4 is impossible. BlockInfo penultimateBlock = file.getPenultimateBlock(); if (previous == null && lastBlockInFile != null && lastBlockInFile.getNumBytes() >= file.getPreferredBlockSize() && lastBlockInFile.isComplete()) { // Case 1 if (NameNode.stateChangeLog.isDebugEnabled()) { NameNode.stateChangeLog.debug( "BLOCK* NameSystem.allocateBlock: handling block allocation" + " writing to a file with a complete previous block: src=" + src + " lastBlock=" + lastBlockInFile); } } else if (Block.matchingIdAndGenStamp(penultimateBlock, previousBlock)) { if (lastBlockInFile.getNumBytes() != 0) { throw new IOException( "Request looked like a retry to allocate block " + lastBlockInFile + " but it already contains " + lastBlockInFile.getNumBytes() + " bytes"); } // Case 2 // Return the last block. NameNode.stateChangeLog.info("BLOCK* allocateBlock: caught retry for " + "allocation of a new block in " + src + ". 
Returning previously" + " allocated block " + lastBlockInFile); long offset = file.computeFileSize(); BlockUnderConstructionFeature uc = lastBlockInFile.getUnderConstructionFeature(); onRetryBlock[0] = makeLocatedBlock(fsn, lastBlockInFile, uc.getExpectedStorageLocations(), offset); return new FileState(file, src, iip); } else { // Case 3 throw new IOException("Cannot allocate block in " + src + ": " + "passed 'previous' block " + previous + " does not match actual " + "last block in file " + lastBlockInFile); } } return new FileState(file, src, iip); } static boolean completeFile(FSNamesystem fsn, FSPermissionChecker pc, final String srcArg, String holder, ExtendedBlock last, long fileId) throws IOException { String src = srcArg; if (NameNode.stateChangeLog.isDebugEnabled()) { NameNode.stateChangeLog.debug("DIR* NameSystem.completeFile: " + src + " for " + holder); } checkBlock(fsn, last); INodesInPath iip = fsn.dir.resolvePath(pc, src, fileId); return completeFileInternal(fsn, iip, holder, ExtendedBlock.getLocalBlock(last), fileId); } private static boolean completeFileInternal( FSNamesystem fsn, INodesInPath iip, String holder, Block last, long fileId) throws IOException { assert fsn.hasWriteLock(RwLockMode.GLOBAL); final String src = iip.getPath(); final INodeFile pendingFile; INode inode = null; try { inode = iip.getLastINode(); pendingFile = fsn.checkLease(iip, holder, fileId); } catch (LeaseExpiredException lee) { if (inode != null && inode.isFile() && !inode.asFile().isUnderConstruction()) { // This could be a retry RPC - i.e the client tried to close // the file, but missed the RPC response. Thus, it is trying // again to close the file. If the file still exists and // the client's view of the last block matches the actual // last block, then we'll treat it as a successful close. // See HDFS-3031. 
final Block realLastBlock = inode.asFile().getLastBlock(); if (Block.matchingIdAndGenStamp(last, realLastBlock)) { NameNode.stateChangeLog.info("DIR* completeFile: " + "request from " + holder + " to complete inode " + fileId + "(" + src + ") which is already closed. But, it appears to be " + "an RPC retry. Returning success"); return true; } } throw lee; } // Check the state of the penultimate block. It should be completed // before attempting to complete the last one. if (!fsn.checkFileProgress(src, pendingFile, false)) { return false; } // commit the last block and complete it if it has minimum replicas fsn.commitOrCompleteLastBlock(pendingFile, iip, last); if (!fsn.checkFileProgress(src, pendingFile, true)) { return false; } fsn.addCommittedBlocksToPending(pendingFile); fsn.finalizeINodeFileUnderConstruction(src, pendingFile, Snapshot.CURRENT_STATE_ID, true); return true; } private static INodeFile newINodeFile( long id, PermissionStatus permissions, long mtime, long atime, Short replication, Byte ecPolicyID, long preferredBlockSize, byte storagePolicyId, BlockType blockType) { return new INodeFile(id, null, permissions, mtime, atime, BlockInfo.EMPTY_ARRAY, replication, ecPolicyID, preferredBlockSize, storagePolicyId, blockType); } /** * Persist the new block (the last block of the given file). */ private static void persistNewBlock( FSNamesystem fsn, String path, INodeFile file) { Preconditions.checkArgument(file.isUnderConstruction()); fsn.getEditLog().logAddBlock(path, file); if (NameNode.stateChangeLog.isDebugEnabled()) { NameNode.stateChangeLog.debug("persistNewBlock: " + path + " with new block " + file.getLastBlock().toString() + ", current total block count is " + file.getBlocks().length); } } /** * Save allocated block at the given pending filename * * @param fsn FSNamesystem * @param src path to the file * @param inodesInPath representing each of the components of src. * The last INode is the INode for {@code src} file. 
* @param newBlock newly allocated block to be save * @param targets target datanodes where replicas of the new block is placed * @throws QuotaExceededException If addition of block exceeds space quota */ static void saveAllocatedBlock(FSNamesystem fsn, String src, INodesInPath inodesInPath, Block newBlock, DatanodeStorageInfo[] targets, BlockType blockType) throws IOException { assert fsn.hasWriteLock(RwLockMode.GLOBAL); BlockInfo b = addBlock(fsn.dir, src, inodesInPath, newBlock, targets, blockType); logAllocatedBlock(src, b); DatanodeStorageInfo.incrementBlocksScheduled(targets); } private static void logAllocatedBlock(String src, BlockInfo b) { if (!NameNode.stateChangeLog.isInfoEnabled()) { return; } StringBuilder sb = new StringBuilder(150); sb.append("BLOCK* allocate "); b.appendStringTo(sb); sb.append(", "); BlockUnderConstructionFeature uc = b.getUnderConstructionFeature(); if (uc != null) { uc.appendUCPartsConcise(sb); } sb.append(" for " + src); NameNode.stateChangeLog.info(sb.toString()); } private static void setNewINodeStoragePolicy(BlockManager bm, INodesInPath iip, boolean isLazyPersist) throws IOException { INodeFile inode = iip.getLastINode().asFile(); if (isLazyPersist) { BlockStoragePolicy lpPolicy = bm.getStoragePolicy("LAZY_PERSIST"); // Set LAZY_PERSIST storage policy if the flag was passed to // CreateFile. if (lpPolicy == null) { throw new HadoopIllegalArgumentException( "The LAZY_PERSIST storage policy has been disabled " + "by the administrator."); } inode.setStoragePolicyID(lpPolicy.getId(), iip.getLatestSnapshotId()); } else { BlockStoragePolicy effectivePolicy = bm.getStoragePolicy(inode.getStoragePolicyID()); if (effectivePolicy != null && effectivePolicy.isCopyOnCreateFile()) { // Copy effective policy from ancestor directory to current file. 
inode.setStoragePolicyID(effectivePolicy.getId(), iip.getLatestSnapshotId()); } } } private static class FileState { final INodeFile inode; final String path; final INodesInPath iip; FileState(INodeFile inode, String fullPath, INodesInPath iip) { this.inode = inode; this.path = fullPath; this.iip = iip; } } static class ValidateAddBlockResult { private final long blockSize; private final int numTargets; private final byte storagePolicyID; private final String clientMachine; private final BlockType blockType; private final ErasureCodingPolicy ecPolicy; ValidateAddBlockResult( long blockSize, int numTargets, byte storagePolicyID, String clientMachine, BlockType blockType, ErasureCodingPolicy ecPolicy) { this.blockSize = blockSize; this.numTargets = numTargets; this.storagePolicyID = storagePolicyID; this.clientMachine = clientMachine; this.blockType = blockType; this.ecPolicy = ecPolicy; if (blockType == BlockType.STRIPED) { Preconditions.checkArgument(ecPolicy != null, "ecPolicy is not specified for striped block"); } } } }
apache/stanbol
35,998
entityhub/jersey/src/main/java/org/apache/stanbol/entityhub/web/reader/FieldQueryReader.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.stanbol.entityhub.web.reader; import java.io.IOException; import java.io.InputStream; import java.lang.annotation.Annotation; import java.lang.reflect.Type; import java.util.ArrayList; import java.util.Arrays; import java.util.Collection; import java.util.Collections; import java.util.Iterator; import java.util.List; import javax.servlet.ServletContext; import javax.ws.rs.WebApplicationException; import javax.ws.rs.core.Context; import javax.ws.rs.core.HttpHeaders; import javax.ws.rs.core.MediaType; import javax.ws.rs.core.MultivaluedMap; import javax.ws.rs.core.Response; import javax.ws.rs.core.Response.Status; import javax.ws.rs.ext.MessageBodyReader; import javax.ws.rs.ext.Provider; import org.apache.commons.io.IOUtils; import org.apache.felix.scr.annotations.Component; import org.apache.felix.scr.annotations.Property; import org.apache.felix.scr.annotations.Reference; import org.apache.felix.scr.annotations.Service; import org.apache.stanbol.commons.namespaceprefix.NamespaceMappingUtils; import org.apache.stanbol.commons.namespaceprefix.NamespacePrefixService; import org.apache.stanbol.entityhub.core.mapping.ValueConverterFactory; import 
org.apache.stanbol.entityhub.core.model.InMemoryValueFactory;
import org.apache.stanbol.entityhub.core.query.FieldQueryImpl;
import org.apache.stanbol.entityhub.ldpath.query.LDPathFieldQueryImpl;
import org.apache.stanbol.entityhub.servicesapi.model.ValueFactory;
import org.apache.stanbol.entityhub.servicesapi.query.Constraint;
import org.apache.stanbol.entityhub.servicesapi.query.Constraint.ConstraintType;
import org.apache.stanbol.entityhub.servicesapi.query.FieldQuery;
import org.apache.stanbol.entityhub.servicesapi.query.RangeConstraint;
import org.apache.stanbol.entityhub.servicesapi.query.ReferenceConstraint;
import org.apache.stanbol.entityhub.servicesapi.query.SimilarityConstraint;
import org.apache.stanbol.entityhub.servicesapi.query.TextConstraint;
import org.apache.stanbol.entityhub.servicesapi.query.TextConstraint.PatternType;
import org.apache.stanbol.entityhub.servicesapi.query.ValueConstraint;
import org.apache.stanbol.entityhub.servicesapi.query.ValueConstraint.MODE;
import org.codehaus.jettison.json.JSONArray;
import org.codehaus.jettison.json.JSONException;
import org.codehaus.jettison.json.JSONObject;
import org.slf4j.Logger;
import org.slf4j.LoggerFactory;

/**
 * JAX-RS {@link MessageBodyReader} that parses {@link FieldQuery} instances from
 * their JSON representation as sent in request entities.
 * <p>
 * Parsing errors are collected and reported back to the client as a
 * {@link Status#BAD_REQUEST} response carrying a human readable description of
 * all encountered problems.
 */
@Component
@Service(Object.class)
@Property(name="javax.ws.rs", boolValue=true)
@Provider
public class FieldQueryReader implements MessageBodyReader<FieldQuery> {

    private static final Logger log = LoggerFactory.getLogger(FieldQueryReader.class);

    // factories used to convert parsed JSON values to the datatype requested by a constraint
    private static final ValueFactory valueFactory = InMemoryValueFactory.getInstance();
    private static final ValueConverterFactory converterFactory = ValueConverterFactory.getDefaultInstance();

    @Reference
    NamespacePrefixService namespacePrefixService;

    /** @return the service used to resolve '{prefix}:{localname}' values to full URIs */
    private NamespacePrefixService getNsPrefixService(){
        return namespacePrefixService;
    }

    @Override
    public boolean isReadable(Class<?> type, Type genericType, Annotation[] annotations, MediaType mediaType) {
        log.debug("isReadable type {}, mediaType {}",type,mediaType);
        //NOTE: the media type is deliberately not checked (see commented condition)
        return FieldQuery.class.isAssignableFrom(type);
        //&& mediaType.isCompatible(MediaType.APPLICATION_JSON_TYPE);
    }

    @Override
    public FieldQuery readFrom(Class<FieldQuery> type,
                               Type genericType,
                               Annotation[] annotations,
                               MediaType mediaType,
                               MultivaluedMap<String,String> httpHeaders,
                               InputStream entityStream) throws IOException, WebApplicationException {
        String queryString = IOUtils.toString(entityStream);
        log.debug("Parsed QueryString: \n{}",queryString);
        //use the accepted media type for eventual error responses; fall back to
        //text/plain for wildcard accepts
        MediaType acceptedType = MediaType.valueOf(httpHeaders.getFirst("Accept"));
        if(acceptedType.isWildcardType()){
            acceptedType = MediaType.TEXT_PLAIN_TYPE;
        }
        try {
            return fromJSON(queryString, acceptedType, getNsPrefixService());
        } catch (JSONException e) {
            log.error("Unable to parse Request ",e);
            StringBuilder message = new StringBuilder();
            message.append("Parsed FieldQuery is not valid JSON\n");
            message.append("Parsed String:\n");
            message.append(queryString);
            log.warn(message.toString());
            //TODO: Jersey wraps Exceptions thrown by MessageBodyReader into
            //      other ones. Because of that the Response created by the
            //      WebApplicationException is "lost" and the user will get an
            //      500 with no comment and HTML content type :(
            //      As a workaround one could use a wrapping object as generic type
            //      that parses the error and than throw the Exception within the
            //      RDFTerm using this MessageBodyReader
            throw new WebApplicationException(
                Response.status(Status.BAD_REQUEST)
                    .entity(message.toString())
                    .header(HttpHeaders.ACCEPT, acceptedType.toString())
                    .build());
        }
    }

    /**
     * Parses a {@link FieldQuery} from its JSON representation.
     *
     * @param jsonQueryString the JSON serialised query (MUST NOT be <code>null</code>)
     * @param acceptedMediaType used to add the Accept header to error responses
     * @param nsPrefixService used to convert '{prefix}:{localname}' values to full URIs
     * @return the parsed query
     * @throws JSONException if the parsed string is not valid JSON
     * @throws WebApplicationException with a BAD_REQUEST response if the JSON is
     *         no valid representation of a FieldQuery
     */
    public static FieldQuery fromJSON(String jsonQueryString, MediaType acceptedMediaType,
            NamespacePrefixService nsPrefixService) throws JSONException, WebApplicationException {
        if(jsonQueryString == null){
            throw new IllegalArgumentException("The parsed JSON object MUST NOT be NULL!");
        }
        JSONObject jQuery = new JSONObject(jsonQueryString);
        FieldQuery query;
        if(jQuery.has("ldpath")){ //STANBOL-417: support for using LDPath as select
            LDPathFieldQueryImpl ldPathQuery = new LDPathFieldQueryImpl();
            ldPathQuery.setLDPathSelect(jQuery.getString("ldpath"));
            query = ldPathQuery;
        } else {
            query = new FieldQueryImpl();
        }
        if(!jQuery.has("constraints")){
            StringBuilder message = new StringBuilder();
            message.append("The parsed Field Query MUST contain at least a single 'constraints'\n");
            message.append("Parsed Query:\n");
            message.append(jQuery.toString(4));
            log.warn(message.toString());
            throw new WebApplicationException(
                Response.status(Status.BAD_REQUEST)
                    .entity(message.toString())
                    .header(HttpHeaders.ACCEPT, acceptedMediaType.toString())
                    .build());
        }
        JSONArray constraints = jQuery.getJSONArray("constraints");
        //collect all parsing errors so a complete set of problems can be reported
        boolean parsingError = false;
        StringBuilder parsingErrorMessages = new StringBuilder();
        parsingErrorMessages.append("Constraint parsing Errors:\n");
        for(int i=0;i<constraints.length();i++){
            JSONObject jConstraint = constraints.getJSONObject(i);
            if(jConstraint.has("field")){
                String field = jConstraint.getString("field");
                if(field == null || field.isEmpty()){
                    parsingErrorMessages.append('\n');
                    parsingErrorMessages.append("Each Field Query Constraint MUST define a value for 'field'\n");
                    parsingErrorMessages.append("Parsed Constraint:\n");
                    parsingErrorMessages.append(jConstraint.toString(4));
                    parsingErrorMessages.append('\n');
                    parsingError = true;
                    continue;
                }
                //convert a possible '{prefix}:{localname}' value to the full URI
                String fieldUri = nsPrefixService.getFullName(field);
                if(fieldUri == null){
                    parsingErrorMessages.append('\n');
                    //FIX: added the missing closing quote after the field name
                    parsingErrorMessages.append("The 'field' '").append(field)
                        .append("' uses an unknown namespace prefix '");
                    parsingErrorMessages.append(NamespaceMappingUtils.getPrefix(field)).append("'\n");
                    parsingErrorMessages.append("Parsed Constraint:\n");
                    parsingErrorMessages.append(jConstraint.toString(4));
                    parsingErrorMessages.append('\n');
                    parsingError = true;
                    continue;
                } else if(query.isConstrained(fieldUri)){
                    parsingErrorMessages.append('\n');
                    parsingErrorMessages.append("The parsed Query defines multiple constraints for the field '")
                        .append(fieldUri).append("'!\n");
                    parsingErrorMessages.append("FieldQuery allows only a single Constraint for a field\n");
                    parsingErrorMessages.append("Parsed Constraints:\n");
                    parsingErrorMessages.append(constraints.toString(4));
                    parsingErrorMessages.append('\n');
                    parsingError = true;
                    continue;
                } else {
                    try {
                        query.setConstraint(fieldUri, parseConstraint(jConstraint, nsPrefixService));
                    } catch (IllegalArgumentException e) {
                        parsingErrorMessages.append('\n');
                        parsingErrorMessages.append(e.getMessage());
                        parsingErrorMessages.append('\n');
                        parsingError = true;
                        continue;
                    }
                }
            } else { //constraint without a 'field' key
                parsingErrorMessages.append('\n');
                parsingErrorMessages.append("Constraints MUST define a value for 'field'\n");
                parsingErrorMessages.append("Parsed Constraint:\n");
                parsingErrorMessages.append(jConstraint.toString(4));
                parsingErrorMessages.append('\n');
                parsingError = true;
                continue;
            }
        }
        if(parsingError){
            String message = parsingErrorMessages.toString();
            log.warn(message);
            throw new WebApplicationException(
                Response.status(Status.BAD_REQUEST)
                    .entity(message)
                    .header(HttpHeaders.ACCEPT, acceptedMediaType.toString())
                    .build());
        }
        //parse the selected fields
        JSONArray selected = jQuery.optJSONArray("selected");
        if(selected != null){
            for(int i=0;i<selected.length();i++){
                String selectedField = selected.getString(i);
                selectedField = nsPrefixService.getFullName(selectedField);
                if(selectedField != null && !selectedField.isEmpty()){
                    query.addSelectedField(selectedField);
                }
            }
        } //else no selected fields -> funny but maybe someone do need only the ids
        //parse limit and offset
        if(jQuery.has("limit") && !jQuery.isNull("limit")){
            try {
                query.setLimit(jQuery.getInt("limit"));
            } catch (JSONException e) {
                parsingErrorMessages.append('\n');
                parsingErrorMessages.append("Property \"limit\" MUST BE a valid integer number!\n");
                parsingErrorMessages.append("Parsed Value:");
                //FIX: was jQuery.get("init") - a non-existent key that made the
                //error path itself throw a JSONException
                parsingErrorMessages.append(jQuery.opt("limit"));
                parsingErrorMessages.append('\n');
                parsingError = true;
            }
        }
        if(jQuery.has("offset") && !jQuery.isNull("offset")){
            try {
                query.setOffset(jQuery.getInt("offset"));
            } catch (JSONException e) {
                parsingErrorMessages.append('\n');
                parsingErrorMessages.append("Property \"offset\" MUST BE a valid integer number!\n");
                parsingErrorMessages.append("Parsed Value:");
                //FIX: was jQuery.get("init") (see "limit" above)
                parsingErrorMessages.append(jQuery.opt("offset"));
                parsingErrorMessages.append('\n');
                parsingError = true;
            }
        }
        //FIX: invalid 'limit'/'offset' values were collected in the error buffer
        //but the flag was never re-checked, so those errors were silently dropped
        if(parsingError){
            String message = parsingErrorMessages.toString();
            log.warn(message);
            throw new WebApplicationException(
                Response.status(Status.BAD_REQUEST)
                    .entity(message)
                    .header(HttpHeaders.ACCEPT, acceptedMediaType.toString())
                    .build());
        }
        return query;
    }

    /**
     * Parses a single {@link Constraint} from its JSON representation based on
     * the value of the mandatory 'type' key.
     *
     * @param jConstraint the JSON formatted constraint
     * @param nsPrefixService used to resolve '{prefix}:{localname}' values
     * @return the parsed constraint
     * @throws JSONException on any JSON access error
     * @throws IllegalArgumentException if the constraint is invalid (the message
     *         is intended to be reported back to the client)
     */
    private static Constraint parseConstraint(JSONObject jConstraint,
            NamespacePrefixService nsPrefixService) throws JSONException {
        final Constraint constraint;
        if(jConstraint.has("type") && !jConstraint.isNull("type")) {
            String type = jConstraint.getString("type");
            //Even that internally "reference" is not part of the ConstraintType
            //enum it is still present in the serialisation and the Java API
            //(see ReferenceConstraint class). Value constraints with the dataType
            //Reference and AnyURI are considered to represent reference constraints.
            if(type.equals("reference")){
                constraint = parseReferenceConstraint(jConstraint, nsPrefixService);
            } else if (type.equals(ConstraintType.value.name())){
                constraint = parseValueConstraint(jConstraint, nsPrefixService);
            } else if (type.equals(ConstraintType.text.name())){
                constraint = parseTextConstraint(jConstraint);
            } else if (type.equals(ConstraintType.range.name())){
                constraint = parseRangeConstraint(jConstraint, nsPrefixService);
            } else if(type.equals(ConstraintType.similarity.name())){
                constraint = parseSimilarityConstraint(jConstraint, nsPrefixService);
            } else {
                //FIX: the format string had two '%s' placeholders but only one
                //argument, causing a MissingFormatArgumentException on this path
                log.warn(String.format("Unknown Constraint Type %s. Supported values are %s",
                    type, Arrays.asList("reference", ConstraintType.values())));
                StringBuilder message = new StringBuilder();
                message.append("Parsed Constraint uses an unknown value for 'type'!\n");
                message.append("Supported values: ");
                message.append(ConstraintType.values());
                message.append('\n');
                message.append("Parsed Constraint: \n");
                message.append(jConstraint.toString(4));
                throw new IllegalArgumentException(message.toString());
            }
        } else {
            log.warn(String.format("Each Constraint MUST HAVE the \"type\" key set to one of the values %s",
                Arrays.asList("reference", ConstraintType.values())));
            StringBuilder message = new StringBuilder();
            message.append("Parsed Constraint does not define a value for the field 'type'!\n");
            message.append("Supported values: ");
            message.append(ConstraintType.values());
            message.append('\n');
            message.append("Parsed Constraint: \n");
            message.append(jConstraint.toString(4));
            throw new IllegalArgumentException(message.toString());
        }
        //finally parse the optional boost
        if(jConstraint.has("boost")){
            double boost = jConstraint.optDouble("boost");
            //FIX: 'boost == Double.NaN' is always false (NaN never equals itself),
            //so non-numeric boost values were silently accepted
            if(Double.isNaN(boost) || boost <= 0){
                StringBuilder message = new StringBuilder("The Boost of a Constraint "
                    + "MUST BE a double AND >= 0 (parsed: '");
                message.append(jConstraint.get("boost")).append("')!");
                log.warn(message.toString());
                throw new IllegalArgumentException(message.toString());
            } else {
                constraint.setBoost(boost);
            }
        } //else no boost defined
        return constraint;
    }

    /**
     * Parses a {@link SimilarityConstraint} with its mandatory 'context' and
     * optional 'addFields' keys.
     *
     * @param jConstraint the JSON formatted constraint
     * @param nsPrefixService used to resolve '{prefix}:{localname}' field names
     * @return the parsed constraint
     * @throws JSONException on any JSON access error
     */
    private static Constraint parseSimilarityConstraint(JSONObject jConstraint,
            NamespacePrefixService nsPrefixService) throws JSONException {
        //FIX: optString(key) returns "" (not null) for missing keys, so the
        //original null check could never detect a missing "context"
        String context = jConstraint.optString("context", null);
        if(context == null){
            throw new IllegalArgumentException("SimilarityConstraints MUST define a \"context\": \n "
                + jConstraint.toString(4));
        }
        JSONArray addFields = jConstraint.optJSONArray("addFields");
        final List<String> fields;
        if(addFields != null && addFields.length() > 0){
            fields = new ArrayList<String>(addFields.length());
            for(int i=0;i<addFields.length();i++){
                String field = addFields.optString(i);
                field = field != null ? nsPrefixService.getFullName(field) : null;
                if(field != null && !field.isEmpty()){
                    fields.add(field);
                }
            }
        } else {
            fields = null;
        }
        return new SimilarityConstraint(context, fields);
    }

    /**
     * Parses a {@link RangeConstraint} with its optional 'upperBound',
     * 'lowerBound', 'inclusive' and 'datatype' keys. At least one of the two
     * bounds MUST BE present. Bounds are converted to the parsed datatype
     * (only the first datatype is used if multiple are parsed).
     *
     * @param jConstraint the JSON formatted constraint
     * @param nsPrefixService used to resolve '{prefix}:{localname}' datatypes
     * @return the parsed constraint
     * @throws JSONException on any JSON access error
     */
    private static Constraint parseRangeConstraint(JSONObject jConstraint,
            NamespacePrefixService nsPrefixService) throws JSONException {
        Constraint constraint;
        boolean inclusive;
        if(jConstraint.has("inclusive")){
            inclusive = jConstraint.getBoolean("inclusive");
        } else {
            log.debug("RangeConstraint does not define the field 'inclusive'. Use false as default!");
            inclusive = false;
        }
        Object upperBound = jConstraint.opt("upperBound");
        Object lowerBound = jConstraint.opt("lowerBound");
        Collection<String> datatypes = parseDatatypeProperty(jConstraint, nsPrefixService);
        if(datatypes != null && !datatypes.isEmpty()){
            Iterator<String> it = datatypes.iterator();
            String datatype = it.next();
            if(datatypes.size() > 1){ //warn in case of multiple values
                log.warn("Multiple datatypes are not supported by RangeConstriants!");
                log.warn("  used: {}", datatype);
                while(it.hasNext()){
                    log.warn("  ignored: {}", it.next());
                }
            }
            StringBuilder convertingError = null;
            if(upperBound != null){
                Object convertedUpperBound = converterFactory.convert(upperBound, datatype, valueFactory);
                if(convertedUpperBound == null){
                    log.warn("Unable to convert upper bound {} to data type {}", upperBound, datatype);
                    convertingError = new StringBuilder();
                    convertingError.append("Unable to convert the parsed upper bound value ")
                        .append(upperBound).append(" to data type ").append(datatype);
                } else { //set the converted upper bound
                    upperBound = convertedUpperBound;
                }
            }
            if(lowerBound != null){
                Object convertedLowerBound = converterFactory.convert(lowerBound, datatype, valueFactory);
                if(convertedLowerBound == null){
                    log.warn("Unable to convert lower bound {} to data type {}", lowerBound, datatype);
                    if(convertingError == null){
                        convertingError = new StringBuilder();
                    } else {
                        convertingError.append('\n');
                    }
                    convertingError.append("Unable to convert the parsed value ")
                        .append(lowerBound).append(" to data type ").append(datatype);
                } else { //set the converted lower bound
                    lowerBound = convertedLowerBound;
                }
            }
            if(convertingError != null){ //if there was an error throw an exception
                convertingError.append("Parsed Constraint: \n");
                convertingError.append(jConstraint.toString(4));
                throw new IllegalArgumentException(convertingError.toString());
            }
        }
        if(upperBound == null && lowerBound == null){
            log.warn("Range Constraint does not define an 'upperBound' nor an 'lowerBound'! "
                + "At least one of the two MUST BE parsed for a valid RangeConstraint.");
            StringBuilder message = new StringBuilder();
            message.append("Range Constraint does not define an 'upperBound' nor an 'lowerBound'!");
            message.append(" At least one of the two MUST BE parsed for a valid RangeConstraint.\n");
            message.append("Parsed Constraint: \n");
            message.append(jConstraint.toString(4));
            throw new IllegalArgumentException(message.toString());
        } else {
            constraint = new RangeConstraint(lowerBound, upperBound, inclusive);
        }
        return constraint;
    }

    /**
     * Parses a {@link TextConstraint} with its mandatory 'text' key and the
     * optional 'caseSensitive', 'patternType', 'language'(s) and
     * 'proximityRanking' keys.
     *
     * @param jConstraint the JSON formatted constraint
     * @return the parsed constraint
     * @throws JSONException on any JSON access error
     */
    private static Constraint parseTextConstraint(JSONObject jConstraint) throws JSONException {
        final TextConstraint constraint;
        boolean caseSensitive = jConstraint.optBoolean("caseSensitive", false);
        //parse patternType
        PatternType patternType;
        String jPatternType = jConstraint.optString("patternType", null);
        if(jPatternType == null){
            patternType = PatternType.none;
        } else {
            try {
                patternType = PatternType.valueOf(jPatternType);
            } catch (IllegalArgumentException e) {
                log.warn("Encountered unknown patternType for TextConstraint!", e);
                StringBuilder message = new StringBuilder();
                message.append("Illegal value for field 'patternType'.\n");
                message.append("Supported values are: ");
                message.append(Arrays.toString(PatternType.values()));
                message.append('\n');
                message.append("Parsed Constraint: \n");
                message.append(jConstraint.toString(4));
                throw new IllegalArgumentException(message.toString());
            }
        }
        //parse languages; support both "languages" (deprecated) and "language"
        Collection<String> languages;
        String languageKey = null;
        if(jConstraint.has("language")){
            languageKey = "language";
        } else if(jConstraint.has("languages")){
            log.warn("The key \"languages\" is deprecated. Use \"language\" instead.");
            languageKey = "languages";
        }
        if(languageKey != null){
            JSONArray jLanguages = jConstraint.optJSONArray(languageKey);
            if(jLanguages != null && jLanguages.length() > 0){
                languages = new ArrayList<String>(jLanguages.length());
                for(int i=0;i<jLanguages.length();i++){
                    String lang = jLanguages.getString(i);
                    if(lang != null && !lang.isEmpty()){
                        languages.add(lang);
                    } else if(!languages.contains(null)){
                        //a null entry means "default language"; add it only once
                        languages.add(null);
                    }
                }
                if(languages.isEmpty()){
                    languages = null; //if no one was successfully added set the list back to null
                }
            } else {
                String language = jConstraint.getString(languageKey);
                if(language.isEmpty()){
                    languages = null;
                } else { //add the single language
                    languages = Collections.singletonList(language);
                }
            }
        } else {
            languages = null;
        }
        //parse text and create constraint
        if(jConstraint.has("text") && !jConstraint.isNull("text")){
            List<String> textConstraints;
            JSONArray jTextConstraints = jConstraint.optJSONArray("text");
            if(jTextConstraints != null){
                textConstraints = new ArrayList<String>(jTextConstraints.length());
                for(int i=0;i<jTextConstraints.length();i++){
                    String text = jTextConstraints.getString(i);
                    if(text != null && !text.isEmpty()){
                        textConstraints.add(text);
                    }
                }
            } else {
                String text = jConstraint.getString("text");
                if(text == null || text.isEmpty()){
                    textConstraints = Collections.emptyList();
                } else {
                    textConstraints = Collections.singletonList(text);
                }
            }
            if(textConstraints.isEmpty()){
                StringBuilder message = new StringBuilder();
                message.append("Parsed TextConstraint does not define a valid (none empty) value for the 'text' property !\n");
                message.append("Parsed Constraint: \n");
                message.append(jConstraint.toString(4));
                throw new IllegalArgumentException(message.toString());
            }
            constraint = new TextConstraint(textConstraints, patternType, caseSensitive,
                languages == null ? null : languages.toArray(new String[languages.size()]));
            //finally parse the optional termProximity
            if(jConstraint.has("proximityRanking")){
                constraint.setProximityRanking(jConstraint.optBoolean("proximityRanking", false));
            }
        } else {
            StringBuilder message = new StringBuilder();
            message.append("Parsed TextConstraint does not define the required field 'text'!\n");
            message.append("Parsed Constraint: \n");
            message.append(jConstraint.toString(4));
            throw new IllegalArgumentException(message.toString());
        }
        return constraint;
    }

    /**
     * Parses a {@link ValueConstraint} with its mandatory 'value' (single value
     * or JSONArray of values) and the optional 'datatype' and 'mode' keys.
     *
     * @param jConstraint the JSON formatted constraint
     * @param nsPrefixService used to resolve '{prefix}:{localname}' datatypes
     * @return the parsed constraint
     * @throws JSONException on any JSON access error
     */
    private static Constraint parseValueConstraint(JSONObject jConstraint,
            NamespacePrefixService nsPrefixService) throws JSONException {
        Collection<String> dataTypes = parseDatatypeProperty(jConstraint, nsPrefixService);
        final List<Object> valueList;
        if(jConstraint.has("value") && !jConstraint.isNull("value")){
            Object value = jConstraint.get("value");
            if(value instanceof JSONArray){
                valueList = new ArrayList<Object>(((JSONArray)value).length());
                for(int i=0;i<((JSONArray)value).length();i++){
                    Object v = ((JSONArray)value).get(i);
                    if(v == null || v instanceof JSONArray || v instanceof JSONObject){
                        log.warn("Parsed ValueConstraint does define illegal values (values={})!", value);
                        StringBuilder message = new StringBuilder();
                        message.append("Parsed ValueConstraint does define illegal values for field 'value'"
                            + "(value MUST NOT contain NULL, JSONObject nor JSONArray values)!\n");
                        message.append("Parsed Constraint: \n");
                        message.append(jConstraint.toString(4));
                        throw new IllegalArgumentException(message.toString());
                    }
                    valueList.add(v);
                }
            } else if(value instanceof JSONObject){
                log.warn("Parsed ValueConstraint does define illegal values (values={})!", value);
                StringBuilder message = new StringBuilder();
                message.append("Parsed ValueConstraint does define illegal value for field 'value'"
                    + "(value MUST NOT be an JSON object. Only values and JSONArray to parse"
                    + "multiple values are allowed)!\n");
                message.append("Parsed Constraint: \n");
                message.append(jConstraint.toString(4));
                throw new IllegalArgumentException(message.toString());
            } else {
                valueList = Collections.singletonList(jConstraint.get("value"));
            }
        } else {
            log.warn("Parsed ValueConstraint does not define the required field \"value\"!");
            StringBuilder message = new StringBuilder();
            message.append("Parsed ValueConstraint does not define the required field 'value'!\n");
            message.append("Parsed Constraint: \n");
            message.append(jConstraint.toString(4));
            throw new IllegalArgumentException(message.toString());
        }
        MODE mode = parseConstraintValueMode(jConstraint);
        return new ValueConstraint(valueList, dataTypes, mode);
    }

    /**
     * Parses the {@link MODE} for {@link ValueConstraint}s and
     * {@link ReferenceConstraint}s, by evaluating the 'mode' attribute of
     * the parsed {@link JSONObject}
     * @param jConstraint the JSON formatted constraint
     * @return the parsed {@link MODE} or <code>null</code> if the 'mode'
     * attribute is not present
     * @throws JSONException if the value of the 'mode' is not an element of the
     * {@link MODE} enumeration.
     */
    private static MODE parseConstraintValueMode(JSONObject jConstraint) throws JSONException {
        MODE mode;
        if(jConstraint.has("mode")){
            String jmode = jConstraint.getString("mode");
            try {
                mode = MODE.valueOf(jmode);
            } catch (IllegalArgumentException e) {
                String message = String.format("Parsed ValueConstraint defines an "
                    + "unknown MODE %s (supported: %s)!", jmode, Arrays.asList(MODE.values()));
                log.warn(message, e);
                StringBuilder errorMessage = new StringBuilder();
                errorMessage.append(message).append('\n');
                errorMessage.append("Parsed Constraint: \n");
                errorMessage.append(jConstraint.toString(4));
                //FIX: the detailed errorMessage (including the constraint dump)
                //was built but the exception was thrown with the short message
                throw new IllegalArgumentException(errorMessage.toString(), e);
            }
        } else {
            mode = null;
        }
        return mode;
    }

    /**
     * Parses the datatype(s) of a constraint. Supports both the 'datatype' key
     * and the deprecated 'dataTypes' key; values may be a single string or a
     * JSONArray of strings and are resolved to full URIs.
     *
     * @param jConstraint the JSON formatted constraint
     * @param nsPrefixService used to resolve '{prefix}:{localname}' datatypes
     * @return the parsed datatypes or <code>null</code> if none are defined
     * @throws JSONException on any JSON access error
     */
    private static Collection<String> parseDatatypeProperty(JSONObject jConstraint,
            NamespacePrefixService nsPrefixService) throws JSONException {
        Collection<String> dataTypes;
        String dataTypeKey = null;
        if(jConstraint.has("datatype")){
            dataTypeKey = "datatype";
        } else if(jConstraint.has("dataTypes")){
            //FIX: the deprecation message recommended "dataType", but the
            //actually supported key is "datatype"
            log.warn("The use of \"dataTypes\" is deprecated. Please use \"datatype\" instead");
            dataTypeKey = "dataTypes";
        }
        if(dataTypeKey != null){
            JSONArray jDataTypes = jConstraint.optJSONArray(dataTypeKey);
            if(jDataTypes != null && jDataTypes.length() > 0){
                dataTypes = new ArrayList<String>(jDataTypes.length());
                for(int i=0;i<jDataTypes.length();i++){
                    String dataType = jDataTypes.getString(i);
                    //convert prefix:localName to full URI
                    dataType = dataType != null ? nsPrefixService.getFullName(dataType) : null;
                    if(dataType != null && !dataType.isEmpty()){
                        dataTypes.add(dataType);
                    }
                }
                if(dataTypes.isEmpty()){
                    dataTypes = null; //if no one was successfully added set the list back to null
                }
            } else {
                String dataType = jConstraint.getString(dataTypeKey);
                //convert prefix:localName to full URI
                dataType = dataType != null ? nsPrefixService.getFullName(dataType) : null;
                if(dataType != null && !dataType.isEmpty()){
                    dataTypes = Collections.singleton(dataType);
                } else {
                    dataTypes = null;
                }
            }
        } else {
            dataTypes = null;
        }
        return dataTypes;
    }

    /**
     * Parses a {@link ReferenceConstraint} with its mandatory 'value' (single
     * reference or JSONArray of references) and the optional 'mode' key.
     * References are resolved to full URIs.
     *
     * @param jConstraint the JSON formatted constraint
     * @param nsPrefixService used to resolve '{prefix}:{localname}' references
     * @return the parsed constraint
     * @throws JSONException on any JSON access error
     */
    private static Constraint parseReferenceConstraint(JSONObject jConstraint,
            NamespacePrefixService nsPrefixService) throws JSONException {
        final List<String> refList;
        if(jConstraint.has("value") && !jConstraint.isNull("value")){
            Object value = jConstraint.get("value");
            if(value instanceof JSONArray){
                refList = new ArrayList<String>(((JSONArray)value).length());
                for(int i=0;i<((JSONArray)value).length();i++){
                    String field = ((JSONArray)value).getString(i);
                    field = field != null ? nsPrefixService.getFullName(field) : null;
                    if(field != null && !field.isEmpty()){
                        refList.add(field);
                    }
                }
            } else if(value instanceof JSONObject){
                log.warn("Parsed ValueConstraint does define illegal values (values={})!", value);
                StringBuilder message = new StringBuilder();
                message.append("Parsed ValueConstraint does define illegal value for field 'value'"
                    + "(value MUST NOT be an JSON object. Only values and JSONArray to parse"
                    + "multiple values are allowed)!\n");
                message.append("Parsed Constraint: \n");
                message.append(jConstraint.toString(4));
                throw new IllegalArgumentException(message.toString());
            } else {
                String field = jConstraint.getString("value");
                field = field != null ? nsPrefixService.getFullName(field) : null;
                if(field != null){
                    refList = Collections.singletonList(field);
                } else {
                    refList = Collections.emptyList();
                }
            }
            if(refList.isEmpty()){
                log.warn("Parsed ReferenceConstraint does not define a single valid \"value\"!");
                StringBuilder message = new StringBuilder();
                message.append("Parsed ReferenceConstraint does not define a single valid 'value'!\n");
                message.append("This means values where only null, empty string or '{prefix}:{localname}' values with unknown {prefix}\n");
                message.append("Parsed Constraint: \n");
                message.append(jConstraint.toString(4));
                throw new IllegalArgumentException(message.toString());
            }
            MODE mode = parseConstraintValueMode(jConstraint);
            return new ReferenceConstraint(refList, mode);
        } else {
            log.warn("Parsed ReferenceConstraint does not define the required field \"value\"!");
            StringBuilder message = new StringBuilder();
            message.append("Parsed ReferenceConstraint does not define the required field 'value'!\n");
            message.append("Parsed Constraint: \n");
            message.append(jConstraint.toString(4));
            throw new IllegalArgumentException(message.toString());
        }
    }
}
apache/hop
35,937
core/src/main/java/org/apache/hop/core/database/IDatabase.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.hop.core.database; import java.sql.DatabaseMetaData; import java.sql.ResultSet; import java.sql.ResultSetMetaData; import java.sql.SQLException; import java.util.List; import java.util.Map; import org.apache.hop.core.exception.HopDatabaseException; import org.apache.hop.core.exception.HopValueException; import org.apache.hop.core.row.IValueMeta; import org.apache.hop.core.variables.IVariables; import org.apache.hop.metadata.api.HopMetadataObject; /** * This interface describes the methods that a database connection needs to have in order to * describe it properly. */ @HopMetadataObject(objectFactory = DatabaseMetaObjectFactory.class) public interface IDatabase extends Cloneable { /** * @return the plugin id of this database */ String getPluginId(); /** * @param pluginId set the plugin id of this plugin (after instantiation) */ void setPluginId(String pluginId); /** * @return the plugin name of this database, the same thing as the annotation typeDescription */ String getPluginName(); /** * @param pluginName set the plugin name of this plugin (after instantiation) */ void setPluginName(String pluginName); /** * @return Returns the accessType. 
*/ int getAccessType(); /** * @param accessType The accessType to set. */ void setAccessType(int accessType); /** * @return Returns the changed. */ boolean isChanged(); /** * @param changed The changed to set. */ void setChanged(boolean changed); /** * @return Returns the databaseName. */ String getDatabaseName(); /** * @param databaseName The databaseName to set. */ void setDatabaseName(String databaseName); /** * @return Returns the hostname. */ String getHostname(); /** * @param hostname The hostname to set. */ void setHostname(String hostname); /** * @return the username to log onto the database */ String getUsername(); /** * @param username Sets the username to log onto the database with. */ void setUsername(String username); /** * @return Returns the password. */ String getPassword(); /** * @param password The password to set. */ void setPassword(String password); /** * @return Returns the servername. */ String getServername(); /** * @param servername The servername to set. */ void setServername(String servername); /** * @return the tablespace to store data in. (create table) */ String getDataTablespace(); /** * @param dataTablespace the tablespace to store data in */ void setDataTablespace(String dataTablespace); /** * @return the tablespace to store indexes in */ String getIndexTablespace(); /** * @param indexTablespace the tablespace to store indexes in */ void setIndexTablespace(String indexTablespace); /** * @return The extra attributes for this database connection */ Map<String, String> getAttributes(); /** * Set extra attributes on this database connection * * @param attributes The extra attributes to set on this database connection. 
*/ void setAttributes(Map<String, String> attributes); /** * Add extra attribute on this connection * * @param attributeId the attribute identifier * @param value the value of the attribute */ default void addAttribute(String attributeId, String value) { // Default implementation does nothing } /** * Gets an attribute from the connection * * @param attributeId the attribute identifier * @param defaultValue the default value in case the attribute is not found * @return the attribute value */ default String getAttribute(String attributeId, String defaultValue) { return ""; } /** * See if this database supports the setCharacterStream() method on a PreparedStatement. * * @return true if we can set a Stream on a field in a PreparedStatement. False if not. */ boolean isSupportsSetCharacterStream(); /** * @return Whether or not the database can use auto increment type of fields (pk) */ boolean isSupportsAutoInc(); /* Returns weather or not the database supports a custom SQL statement to perform delete operations */ boolean isSupportsCustomDeleteStmt(); /* Returns weather or not the database supports a custom SQL statement to perform update operations */ boolean isSupportsCustomUpdateStmt(); /** * Describe a Value as a field in the database. * * @param v The value to describe * @param tk The field that's going to be the technical key * @param pk The field that's going to be the primary key * @param useAutoIncrement Use autoincrement or not * @param addFieldName Add the fieldname to the definition or not * @param addCr Add a cariage return at the end of the definition or not. * @return a value described as a field in this database. */ String getFieldDefinition( IValueMeta v, String tk, String pk, boolean useAutoIncrement, boolean addFieldName, boolean addCr); /** * Get the list of possible access types for a database. * * @return the list of possible access types for a database. 
*/ int[] getAccessTypeList(); /** * @return the default database port number */ int getDefaultDatabasePort(); /** * @return default extra Options */ Map<String, String> getDefaultOptions(); /** * @param nrRows The number of rows to which we want to limit the result of the query. * @return the clause after a select statement to limit the number of rows */ String getLimitClause(int nrRows); /** * Returns the minimal SQL to launch in order to determine the layout of the resultset for a given * database table * * @param tableName The name of the table to determine the layout for * @return The SQL to launch. */ String getSqlQueryFields(String tableName); /** * Get the not found technical key. * * @param useAutoIncrement Whether or not we want to use an auto increment field * @return the lowest possible technical key to be used as the NOT FOUND row in a slowly changing * dimension. */ int getNotFoundTK(boolean useAutoIncrement); /** * Obtain the name of the JDBC driver class that we need to use! * * @return the name of the JDBC driver class for the specific database */ String getDriverClass(); /** * @param hostname the hostname * @param port the port as a string * @param databaseName the database name * @return the URL to use for connecting to the database. * @throws HopDatabaseException in case a configuration error is detected. */ String getURL(String hostname, String port, String databaseName) throws HopDatabaseException; /** * @return true if the database supports sequences */ boolean isSupportsSequences(); /** * Get the SQL to get the next value of a sequence. * * @param sequenceName The sequence name * @return the SQL to get the next value of a sequence. */ String getSqlNextSequenceValue(String sequenceName); /** * Get the current value of a database sequence * * @param sequenceName The sequence to check * @return The current value of a database sequence */ String getSqlCurrentSequenceValue(String sequenceName); /** * Check if a sequence exists. 
* * @param sequenceName The sequence to check * @return The SQL to get the name of the sequence back from the databases data dictionary */ String getSqlSequenceExists(String sequenceName); /** * Checks whether or not the command setFetchSize() is supported by the JDBC driver... * * @return true is setFetchSize() is supported! */ boolean isFetchSizeSupported(); /** * @return true if the database supports transactions. */ boolean isSupportsTransactions(); /** * @return true if the database supports bitmap indexes */ boolean isSupportsBitmapIndex(); /** * @return true if the database JDBC driver supports the setLong command */ boolean isSupportsSetLong(); /** * @return true if the database supports schemas */ boolean isSupportsSchemas(); /** * @return true if the database supports catalogs */ boolean isSupportsCatalogs(); /** * @return true when the database engine supports empty transaction. (for example Informix does * not!) */ boolean isSupportsEmptyTransactions(); /** * Indicates the need to insert a placeholder (0) for auto increment fields. * * @return true if we need a placeholder for auto increment fields in insert statements. */ boolean isNeedsPlaceHolder(); /** * @return the function for Sum agrregate */ String getFunctionSum(); /** * @return the function for Average agrregate */ String getFunctionAverage(); /** * @return the function for Minimum agrregate */ String getFunctionMinimum(); /** * @return the function for Maximum agrregate */ String getFunctionMaximum(); /** * @return the function for Count agrregate */ String getFunctionCount(); /** * Get the schema-table combination to query the right table. Usually that is SCHEMA.TABLENAME, * however there are exceptions to this rule... * * @param schemaName The schema name * @param tablePart The table name * @return the schema-table combination to query the right table. 
*/ String getSchemaTableCombination(String schemaName, String tablePart); /** * Get the maximum length of a text field for this database connection. This includes optional * CLOB, Memo and Text fields. (the maximum!) * * @return The maximum text field length for this database type. (mostly CLOB_LENGTH) */ int getMaxTextFieldLength(); /** * Get the maximum length of a text field (VARCHAR) for this database connection. If this size is * exceeded use a CLOB. * * @return The maximum VARCHAR field length for this database type. (mostly identical to * getMaxTextFieldLength() - CLOB_LENGTH) */ int getMaxVARCHARLength(); /** * Generates the SQL statement to add a column to the specified table * * @param tableName The table to add * @param v The column defined as a value * @param tk the name of the technical key field * @param useAutoIncrement whether or not this field uses auto increment * @param pk the name of the primary key field * @param semicolon whether or not to add a semi-colon behind the statement. * @return the SQL statement to add a column to the specified table */ String getAddColumnStatement( String tableName, IValueMeta v, String tk, boolean useAutoIncrement, String pk, boolean semicolon); /** * Generates the SQL statement to drop a column from the specified table * * @param tableName The table to add * @param v The column defined as a value * @param tk the name of the technical key field * @param useAutoIncrement whether or not this field uses auto increment * @param pk the name of the primary key field * @param semicolon whether or not to add a semi-colon behind the statement. 
* @return the SQL statement to drop a column from the specified table */ String getDropColumnStatement( String tableName, IValueMeta v, String tk, boolean useAutoIncrement, String pk, boolean semicolon); /** * Generates the SQL statement to modify a column in the specified table * * @param tableName The table to add * @param v The column defined as a value * @param tk the name of the technical key field * @param useAutoIncrement whether or not this field uses auto increment * @param pk the name of the primary key field * @param semicolon whether or not to add a semi-colon behind the statement. * @return the SQL statement to modify a column in the specified table */ String getModifyColumnStatement( String tableName, IValueMeta v, String tk, boolean useAutoIncrement, String pk, boolean semicolon); /** * Clone this database interface: copy all info to a new object * * @return the cloned Database Interface object. */ Object clone(); /** * @return an array of reserved words for the database type... */ String[] getReservedWords(); /** * @return true if reserved words need to be double quoted ("password", "select", ...) */ boolean isQuoteReservedWords(); /** * @return The start quote sequence, mostly just double quote, but sometimes [, ... */ String getStartQuote(); /** * @return The end quote sequence, mostly just double quote, but sometimes ], ... */ String getEndQuote(); /** * @return a list of table types to retrieve tables for the database This is mostly just { "TABLE" * } */ String[] getTableTypes(); /** * @return a list of table types to retrieve views for the database This is mostly just { "VIEW" } */ String[] getViewTypes(); /** * @return a list of table types to retrieve synonyms for the database */ String[] getSynonymTypes(); /** * @return true if we need to supply the schema-name to getTables in order to get a correct list * of items. 
*/ boolean useSchemaNameForTableList(); /** * @return true if the database supports views */ boolean isSupportsViews(); /** * @return true if the database supports synonyms */ boolean isSupportsSynonyms(); /** * @return The SQL on this database to get a list of stored procedures. */ String getSqlListOfProcedures(); /** * @param tableName The table to be truncated. * @return The SQL statement to truncate a table: remove all rows from it without a transaction */ String getTruncateTableStatement(String tableName); /** * @return true if the database rounds floating point numbers to the right precision. For example * if the target field is number(7,2) the value 12.399999999 is converted into 12.40 */ boolean isSupportsFloatRoundingOnUpdate(); /** * @param tableNames The names of the tables to lock * @return The SQL command to lock database tables for write purposes. null is returned in case * locking is not supported on the target database. */ String getSqlLockTables(String[] tableNames); /** * @param tableNames The names of the tables to unlock * @return The SQL command to unlock the database tables. null is returned in case locking is not * supported on the target database. */ String getSqlUnlockTables(String[] tableNames); /** * @return true if the database resultsets support getTimeStamp() to retrieve date-time. (Date) */ boolean isSupportsTimeStampToDateConversion(); /** * @return true if the database JDBC driver supports batch updates For example Interbase doesn't * support this! */ boolean isSupportsBatchUpdates(); /** * @return true if the database supports a boolean, bit, logical, ... datatype */ boolean isSupportsBooleanDataType(); /** * @param b Set to true if the database supports a boolean, bit, logical, ... 
datatype */ void setSupportsBooleanDataType(boolean b); /** * @return true if reserved words' case should be preserved */ boolean isPreserveReservedCase(); /** * @param b Set to true if reserved words' case should be preserved */ void setPreserveReservedCase(boolean b); /** * @return true if the database defaults to naming tables and fields in upper case. True for most * databases except for stuborn stuff like Postgres ;-) */ boolean isDefaultingToUppercase(); /** * @return a map of all the extra URL options you want to set, retrieved from the attributes list * (NOT synchronized!) */ Map<String, String> getExtraOptions(); /** * Add an extra option to the attributes list * * @param databaseTypeCode The database type code for which the option applies * @param option The option to set * @param value The value of the option */ void addExtraOption(String databaseTypeCode, String option, String value); /** * @return The extra option separator in database URL for this platform (usually this is semicolon * ; ) */ String getExtraOptionSeparator(); /** * @return The extra option value separator in database URL for this platform (usually this is the * equal sign = ) */ String getExtraOptionValueSeparator(); /** * @return This indicator separates the normal URL from the options */ String getExtraOptionIndicator(); /** * @return true if the database supports connection options in the URL, false if they are put in a * Properties object. */ boolean isSupportsOptionsInURL(); /** * @return extra help text on the supported options on the selected database platform. */ String getExtraOptionsHelpText(); /** * @return true if the database JDBC driver supports getBlob on the resultset. If not we must use * getBytes() to get the data. 
*/ boolean isSupportsGetBlob(); /** * @return The SQL to execute right after connecting */ String getConnectSql(); /** * @param sql The SQL to execute right after connecting */ void setConnectSql(String sql); /** * @return true if the database supports setting the maximum number of return rows in a resultset. */ boolean isSupportsSetMaxRows(); /** * @param tableName The table to verify the existance for * @return The SQL to execute to verify if the given table exists. If an Exception is thrown for * this SQL, we don't have the table. */ String getSqlTableExists(String tableName); /** * @param column The column to verify the existance for * @param tableName The table to verify the existance for * @return The SQL to execute to verify if the given table exists. If an Exception is thrown for * this SQL, we don't have the column. */ String getSqlColumnExists(String column, String tableName); /** * @return true if the database is streaming results (normally this is an option just for MySQL). */ boolean isStreamingResults(); /** * @param useStreaming true if we want the database to stream results (normally this is an option * just for MySQL). */ void setStreamingResults(boolean useStreaming); /** * @return true if all fields should always be quoted in db */ boolean isQuoteAllFields(); /** * @param quoteAllFields true if all fields in DB should be quoted. 
*/ void setQuoteAllFields(boolean quoteAllFields); /** * @return true if all identifiers should be forced to lower case */ boolean isForcingIdentifiersToLowerCase(); /** * @param forceLowerCase true if all identifiers should be forced to lower case */ void setForcingIdentifiersToLowerCase(boolean forceLowerCase); /** * @return true if all identifiers should be forced to upper case */ boolean isForcingIdentifiersToUpperCase(); /** * @param forceUpperCase true if all identifiers should be forced to upper case */ void setForcingIdentifiersToUpperCase(boolean forceUpperCase); /** * @return true if we use a double decimal separator to specify schema/table combinations on * MS-SQL server */ boolean isUsingDoubleDecimalAsSchemaTableSeparator(); /** * @param useDoubleDecimalSeparator true if we should use a double decimal separator to specify * schema/table combinations on MS-SQL server */ void setUsingDoubleDecimalAsSchemaTableSeparator(boolean useDoubleDecimalSeparator); /** * @return true if this database needs a transaction to perform a query (auto-commit turned off). */ boolean isRequiringTransactionsOnQueries(); /** Handles the special case of Oracle where NUMBER(38) is interpreted as Integer or BigNumber */ boolean isStrictBigNumberInterpretation(); /** * You can use this method to supply an alternate factory for the test method in the dialogs. This * is useful for plugins like SAP/R3 and PALO. * * @return the name of the database test factory to use. */ String getDatabaseFactoryName(); /** * @return The preferred schema name of this database connection. */ String getPreferredSchemaName(); /** * @param preferredSchemaName The preferred schema name of this database connection. */ void setPreferredSchemaName(String preferredSchemaName); /** * Verifies on the specified database connection if an index exists on the fields with the * specified name. 
* * @param database * @param schemaName * @param tableName * @param idxFields * @return * @throws HopDatabaseException */ boolean hasIndex(Database database, String schemaName, String tableName, String[] idxFields) throws HopDatabaseException; /** * @return true if the database supports sequences with a maximum value option. The default is * true. */ boolean isSupportsSequenceNoMaxValueOption(); /** * @return true if we need to append the PRIMARY KEY block in the create table block after the * fields, required for Cache. */ boolean isRequiresCreateTablePrimaryKeyAppend(); /** * @return true if the database requires you to cast a parameter to varchar before comparing to * null. */ boolean isRequiresCastToVariousForIsNull(); /** * @return Handles the special case of DB2 where the display size returned is twice the precision. * In that case, the length is the precision. */ boolean isDisplaySizeTwiceThePrecision(); /** * Most databases allow you to retrieve result metadata by preparing a SELECT statement. * * @return true if the database supports retrieval of query metadata from a prepared statement. * False if the query needs to be executed first. */ boolean isSupportsPreparedStatementMetadataRetrieval(); /** * @param tableName * @return true if the specified table is a system table */ boolean isSystemTable(String tableName); /** * @return true if the database supports newlines in a SQL statements. */ boolean isSupportsNewLinesInSql(); /** * @return the SQL to retrieve the list of schemas */ String getSqlListOfSchemas(); /** * @return The maximum number of columns in a database, {@literal <=}0 means: no known limit */ int getMaxColumnsInIndex(); /** * @return true if the database supports error handling (recovery of failure) while doing batch * updates. */ boolean IsSupportsErrorHandlingOnBatchUpdates(); /** * Get the SQL to insert a new empty unknown record in a dimension. 
* * @param schemaTable the schema-table name to insert into * @param keyField The key field * @param versionField the version field * @return the SQL to insert the unknown record into the SCD. */ String getSqlInsertAutoIncUnknownDimensionRow( String schemaTable, String keyField, String versionField); /** * @return true if this is a relational database you can explore. Return false for SAP, PALO, etc. */ boolean isExplorable(); /** * @return true if this is a relational database for which the connection can be tested. */ boolean isTestable(); /** * @return true if this is a relational database for which exploring is disabled */ boolean isExploringDisabled(); /** * @return The SQL on this database to get a list of sequences. */ String getSqlListOfSequences(); /** * Adds quotes around the string according to the database dialect and also escapes special * characters like CR, LF and the quote character itself. * * @param string * @return A string that is properly quoted for use in a SQL statement (insert, update, delete, * etc) */ String quoteSqlString(String string); /** * Returns the SQL Statement that counts the number of rows in the table. * * @param tableName * @return */ String getSelectCountStatement(String tableName); /** * Generate a column alias given the column index and suggested name. * * @param columnIndex Index of the column in the query * @param suggestedName Suggested column name * @return Column alias that is valid for this database */ String generateColumnAlias(int columnIndex, String suggestedName); /** * Parse all possible statements from the provided SQL script. * * @param sqlScript Raw SQL Script to be parsed into executable statements. * @return List of parsed SQL statements to be executed separately. */ List<String> parseStatements(String sqlScript); /** * Parse the statements in the provided SQL script, provide more information about where each was * found in the script. 
* * @param sqlScript Raw SQL Script to be parsed into executable statements. * @return List of SQL script statements to be executed separately. */ List<SqlScriptStatement> getSqlScriptStatements(String sqlScript); /** * @return true if the database is a MySQL variant, like MySQL 5.1, InfiniDB, InfoBright, and so * on. */ boolean isMySqlVariant(); /** * @return true if the database is a Postgres variant like Postgres, Greenplum, Redshift, and so * on. */ boolean isPostgresVariant(); /** * @return true if the database is a Sybase variant. */ boolean isSybaseVariant(); /** * @return true if the database is a SybaseIQ variant. */ boolean isSybaseIQVariant(); /** * @return true if the database is a neoview variant. */ boolean isNeoviewVariant(); /** * @return true if the database is a DuckDB variant. */ boolean isDuckDbVariant(); /** * @return true if the database is a DuckDB variant. */ boolean isExasolVariant(); /** * @return true if the database is an Informix variant. */ boolean isInformixVariant(); /** * @return true if the database is a MS SQL Server (native) variant. */ boolean isMsSqlServerNativeVariant(); /** * @return true if the database is a MS SQL Server variant. */ boolean isMsSqlServerVariant(); /** * @return true if the database is an Oracle variant. */ boolean isOracleVariant(); /** * @return true if the database is a Netezza variant. */ boolean isNetezzaVariant(); /** * @return true if the database is a SQLite variant. */ boolean isSqliteVariant(); /** * @return true if the database is a Terradata variant. */ boolean isTeradataVariant(); /** * Returns a true if savepoints can be released, false if not. * * @return */ boolean isReleaseSavepoint(); /** * Returns the tablespace DDL fragment for a "Data" tablespace. In most databases that use * tablespaces this is where the tables are to be created. 
* * @param variables variables used for possible substitution * @param databaseMeta databaseMeta the database meta used for possible string enclosure of the * tablespace. This method needs this as this is done after environmental substitution. * @return String the tablespace name for tables in the format "tablespace TABLESPACE_NAME". The * TABLESPACE_NAME and the passed DatabaseMata determines if TABLESPACE_NAME is to be enclosed * in quotes. */ String getDataTablespaceDDL(IVariables variables, DatabaseMeta databaseMeta); /** * Returns the tablespace DDL fragment for an "Index" tablespace. * * @param variables variables used for possible substitution * @param databaseMeta databaseMeta the database meta used for possible string enclosure of the * tablespace. This method needs this as this is done after environmental substitution. * @return String the tablespace name for indicis in the format "tablespace TABLESPACE_NAME". The * TABLESPACE_NAME and the passed DatabaseMata determines if TABLESPACE_NAME is to be enclosed * in quotes. */ String getIndexTablespaceDDL(IVariables variables, DatabaseMeta databaseMeta); /** * This method allows a database dialect to convert database specific data types to Hop data * types. * * @param resultSet The result set to use * @param valueMeta The description of the value to retrieve * @param index the index on which we need to retrieve the value, 0-based. * @return The correctly converted Hop data type corresponding to the valueMeta description. * @throws HopDatabaseException */ Object getValueFromResultSet(ResultSet resultSet, IValueMeta valueMeta, int index) throws HopDatabaseException; /** * @return true if the database supports the use of safe-points and if it is appropriate to ever * use it (default to false) */ boolean isUseSafePoints(); /** * @return true if the database supports error handling (the default). 
Returns false for certain * databases (SQLite) that invalidate a prepared statement or even the complete connection * when an error occurs. */ boolean isSupportsErrorHandling(); /** * Convert a value in the SQL equivalent. For example, convert String "Apache" into 'Apache' or * into Oracle date format TO_DATE('2012/08/16 15:36:59', 'YYYY/MM/DD HH24:MI:SS') * * @param valueMeta The description of the value. The date format used is taken from this value * unless dateFormat is specified (not null or empty) * @param valueData The data to convert. * @return The value SQL clause * @throws HopValueException in case there is a data conversion error. */ String getSqlValue(IValueMeta valueMeta, Object valueData, String dateFormat) throws HopValueException; /** * Get the DELETE statement for the current database given the table name * * @param tableName * @return */ String getSqlDeleteStmt(String tableName); /** * Get the UPDATE statement for the current database given the table name * * @param tableName * @return */ String getSqlUpdateStmt(String tableName); /** * @return true if this database only supports metadata retrieval on a result set, never on a * statement (even if the statement has been executed) */ boolean isSupportsResultSetMetadataRetrievalOnly(); /** * @return true if the database supports the Timestamp data type (nanosecond precision and all) */ boolean isSupportsTimestampDataType(); /** * @param b Set to true if the database supports the Timestamp data type (nanosecond precision and * all) */ void setSupportsTimestampDataType(boolean b); /** * Given a String, this will sanitize and return a value safe for usage as a column name * * @param fieldname value to sanitize * @return a String safe for usage as a column name without the need for quoting */ String getSafeFieldname(String fieldname); /** * @return true if the database supports sequences with a maximum value option. The default is * true. 
*/ String getSequenceNoMaxValueOption(); /** * @return true if the database supports autoGeneratedKeys */ boolean isSupportsAutoGeneratedKeys(); /** * Customizes the IValueMeta defined in the base * * @param v the determined iValueMeta * @param rm the sql column type * @param index the index to the column to customize * @return IValueMeta customized with the data base specific types */ IValueMeta customizeValueFromSqlType(IValueMeta v, ResultSetMetaData rm, int index) throws SQLException; /** * Customizes the IValueMeta defined in the base * * @return String the create table statement */ String getCreateTableStatement(); /** Set default options for this database */ default void addDefaultOptions() { // Default implementation does nothing } /** * Create SqlScriptParser for current database dialect * * @return instance of SqlScriptParser for current database dialect */ default SqlScriptParser createSqlScriptParser() { return new SqlScriptParser(true); } /** * @return true if database supports the standard table output transform */ default boolean supportsStandardTableOutput() { return true; } /** * @return the unsupported message if database does not support standard table output transform */ default String getUnsupportedTableOutputMessage() { return ""; } /** * Allows to get the column name for JDBC drivers with different behavior for aliases depending on * the connector version. * * @param dbMetaData * @param rsMetaData * @param index * @return empty if the database doesn't support the legacy column name feature * @throws HopDatabaseException */ default String getLegacyColumnName( DatabaseMetaData dbMetaData, ResultSetMetaData rsMetaData, int index) throws HopDatabaseException { return ""; } /** * Forms the drop table statement specific for a certain RDBMS. 
* * @param tableName Name of the table to drop * @return Drop table statement specific for the current database */ String getDropTableIfExistsStatement(String tableName); /** * Returns false if exception doesn't require full exception log. Could be used in cases of DB * vendor specific error which doesn't require stack trace log. * * @param e exception to check * @return decision result */ boolean isFullExceptionLog(Exception e); String getPort(); void setPort(String port); /** * @return A manually entered URL which will be used over the internally generated one */ String getManualUrl(); /** * @param manualUrl A manually entered URL which will be used over the internally generated one */ void setManualUrl(String manualUrl); /** * @return true if the database name is a required parameter */ boolean isRequiresName(); /** * If the database requires it you can generate an additional clause before the 'fields' * specification in an insert statement. For example, you might have INSERT INTO table1(field1, * field2, field3) but you need to include a PARTITION clause. That's what you can do right here. * If you make this method return PARTITION(field4) you will get INSERT INTO table1 * PARTITION(field4) (field1, field2, field3). * * @param variables the variables to resolve with. * @param schemaTable The schema-table name combination (Fully qualified table name) to generate * the clause for. */ String getSqlInsertClauseBeforeFields(IVariables variables, String schemaTable); /** * Returns a list of UI element IDs that should be excluded from the database editor. Databricks * doesn't need database name or manual URL fields. * * @return List of element IDs to exclude */ List<String> getRemoveItems(); /** * Returns whether URL information should be hidden in test connection dialogs. Databricks URLs * may contain sensitive authentication tokens. * * @return true to hide URL information in test connection results */ boolean isHideUrlInTestConnection(); }
googleapis/google-cloud-java
35,985
java-certificate-manager/proto-google-cloud-certificate-manager-v1/src/main/java/com/google/cloud/certificatemanager/v1/UpdateTrustConfigRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/certificatemanager/v1/trust_config.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.certificatemanager.v1; /** * * * <pre> * Request for the `UpdateTrustConfig` method. * </pre> * * Protobuf type {@code google.cloud.certificatemanager.v1.UpdateTrustConfigRequest} */ public final class UpdateTrustConfigRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.certificatemanager.v1.UpdateTrustConfigRequest) UpdateTrustConfigRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateTrustConfigRequest.newBuilder() to construct. 
private UpdateTrustConfigRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateTrustConfigRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateTrustConfigRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.certificatemanager.v1.TrustConifgProto .internal_static_google_cloud_certificatemanager_v1_UpdateTrustConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.certificatemanager.v1.TrustConifgProto .internal_static_google_cloud_certificatemanager_v1_UpdateTrustConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest.class, com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest.Builder.class); } private int bitField0_; public static final int TRUST_CONFIG_FIELD_NUMBER = 1; private com.google.cloud.certificatemanager.v1.TrustConfig trustConfig_; /** * * * <pre> * Required. A definition of the TrustConfig to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the trustConfig field is set. */ @java.lang.Override public boolean hasTrustConfig() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. A definition of the TrustConfig to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The trustConfig. */ @java.lang.Override public com.google.cloud.certificatemanager.v1.TrustConfig getTrustConfig() { return trustConfig_ == null ? 
com.google.cloud.certificatemanager.v1.TrustConfig.getDefaultInstance() : trustConfig_; } /** * * * <pre> * Required. A definition of the TrustConfig to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.certificatemanager.v1.TrustConfigOrBuilder getTrustConfigOrBuilder() { return trustConfig_ == null ? com.google.cloud.certificatemanager.v1.TrustConfig.getDefaultInstance() : trustConfig_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getTrustConfig()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getTrustConfig()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest)) { return super.equals(obj); } com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest other = (com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest) obj; if (hasTrustConfig() != other.hasTrustConfig()) return false; if (hasTrustConfig()) { if (!getTrustConfig().equals(other.getTrustConfig())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if 
(!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); if (hasTrustConfig()) { hash = (37 * hash) + TRUST_CONFIG_FIELD_NUMBER; hash = (53 * hash) + getTrustConfig().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest parseFrom( byte[] data, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return 
DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request for the `UpdateTrustConfig` method. * </pre> * * Protobuf type {@code google.cloud.certificatemanager.v1.UpdateTrustConfigRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.certificatemanager.v1.UpdateTrustConfigRequest) com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.certificatemanager.v1.TrustConifgProto .internal_static_google_cloud_certificatemanager_v1_UpdateTrustConfigRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.certificatemanager.v1.TrustConifgProto .internal_static_google_cloud_certificatemanager_v1_UpdateTrustConfigRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest.class, com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest.Builder.class); } // Construct using com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { 
if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getTrustConfigFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; trustConfig_ = null; if (trustConfigBuilder_ != null) { trustConfigBuilder_.dispose(); trustConfigBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.certificatemanager.v1.TrustConifgProto .internal_static_google_cloud_certificatemanager_v1_UpdateTrustConfigRequest_descriptor; } @java.lang.Override public com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest getDefaultInstanceForType() { return com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest build() { com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest buildPartial() { com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest result = new com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.trustConfig_ = trustConfigBuilder_ == null ? trustConfig_ : trustConfigBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest) { return mergeFrom((com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest other) { if (other == com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest.getDefaultInstance()) return this; if (other.hasTrustConfig()) { mergeTrustConfig(other.getTrustConfig()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getTrustConfigFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.certificatemanager.v1.TrustConfig trustConfig_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.certificatemanager.v1.TrustConfig, com.google.cloud.certificatemanager.v1.TrustConfig.Builder, com.google.cloud.certificatemanager.v1.TrustConfigOrBuilder> trustConfigBuilder_; /** * * * <pre> * Required. A definition of the TrustConfig to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the trustConfig field is set. */ public boolean hasTrustConfig() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. A definition of the TrustConfig to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The trustConfig. */ public com.google.cloud.certificatemanager.v1.TrustConfig getTrustConfig() { if (trustConfigBuilder_ == null) { return trustConfig_ == null ? 
com.google.cloud.certificatemanager.v1.TrustConfig.getDefaultInstance() : trustConfig_; } else { return trustConfigBuilder_.getMessage(); } } /** * * * <pre> * Required. A definition of the TrustConfig to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setTrustConfig(com.google.cloud.certificatemanager.v1.TrustConfig value) { if (trustConfigBuilder_ == null) { if (value == null) { throw new NullPointerException(); } trustConfig_ = value; } else { trustConfigBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. A definition of the TrustConfig to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setTrustConfig( com.google.cloud.certificatemanager.v1.TrustConfig.Builder builderForValue) { if (trustConfigBuilder_ == null) { trustConfig_ = builderForValue.build(); } else { trustConfigBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. A definition of the TrustConfig to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeTrustConfig(com.google.cloud.certificatemanager.v1.TrustConfig value) { if (trustConfigBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && trustConfig_ != null && trustConfig_ != com.google.cloud.certificatemanager.v1.TrustConfig.getDefaultInstance()) { getTrustConfigBuilder().mergeFrom(value); } else { trustConfig_ = value; } } else { trustConfigBuilder_.mergeFrom(value); } if (trustConfig_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. A definition of the TrustConfig to update. 
* </pre> * * <code> * .google.cloud.certificatemanager.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearTrustConfig() { bitField0_ = (bitField0_ & ~0x00000001); trustConfig_ = null; if (trustConfigBuilder_ != null) { trustConfigBuilder_.dispose(); trustConfigBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. A definition of the TrustConfig to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.certificatemanager.v1.TrustConfig.Builder getTrustConfigBuilder() { bitField0_ |= 0x00000001; onChanged(); return getTrustConfigFieldBuilder().getBuilder(); } /** * * * <pre> * Required. A definition of the TrustConfig to update. * </pre> * * <code> * .google.cloud.certificatemanager.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.certificatemanager.v1.TrustConfigOrBuilder getTrustConfigOrBuilder() { if (trustConfigBuilder_ != null) { return trustConfigBuilder_.getMessageOrBuilder(); } else { return trustConfig_ == null ? com.google.cloud.certificatemanager.v1.TrustConfig.getDefaultInstance() : trustConfig_; } } /** * * * <pre> * Required. A definition of the TrustConfig to update. 
* </pre> * * <code> * .google.cloud.certificatemanager.v1.TrustConfig trust_config = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.certificatemanager.v1.TrustConfig, com.google.cloud.certificatemanager.v1.TrustConfig.Builder, com.google.cloud.certificatemanager.v1.TrustConfigOrBuilder> getTrustConfigFieldBuilder() { if (trustConfigBuilder_ == null) { trustConfigBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.certificatemanager.v1.TrustConfig, com.google.cloud.certificatemanager.v1.TrustConfig.Builder, com.google.cloud.certificatemanager.v1.TrustConfigOrBuilder>( getTrustConfig(), getParentForChildren(), isClean()); trustConfig_ = null; } return trustConfigBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Required. The update mask applies to the resource. For the `FieldMask` * definition, see * https://developers.google.com/protocol-buffers/docs/reference/google.protobuf#fieldmask. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.certificatemanager.v1.UpdateTrustConfigRequest) } // @@protoc_insertion_point(class_scope:google.cloud.certificatemanager.v1.UpdateTrustConfigRequest) private static final com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest(); } public static 
com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateTrustConfigRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateTrustConfigRequest>() { @java.lang.Override public UpdateTrustConfigRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateTrustConfigRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateTrustConfigRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.certificatemanager.v1.UpdateTrustConfigRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
35,868
java-dataproc/proto-google-cloud-dataproc-v1/src/main/java/com/google/cloud/dataproc/v1/InstanceReference.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/dataproc/v1/clusters.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.dataproc.v1; /** * * * <pre> * A reference to a Compute Engine instance. * </pre> * * Protobuf type {@code google.cloud.dataproc.v1.InstanceReference} */ public final class InstanceReference extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.dataproc.v1.InstanceReference) InstanceReferenceOrBuilder { private static final long serialVersionUID = 0L; // Use InstanceReference.newBuilder() to construct. 
private InstanceReference(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private InstanceReference() { instanceName_ = ""; instanceId_ = ""; publicKey_ = ""; publicEciesKey_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new InstanceReference(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataproc.v1.ClustersProto .internal_static_google_cloud_dataproc_v1_InstanceReference_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataproc.v1.ClustersProto .internal_static_google_cloud_dataproc_v1_InstanceReference_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataproc.v1.InstanceReference.class, com.google.cloud.dataproc.v1.InstanceReference.Builder.class); } public static final int INSTANCE_NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object instanceName_ = ""; /** * * * <pre> * The user-friendly name of the Compute Engine instance. * </pre> * * <code>string instance_name = 1;</code> * * @return The instanceName. */ @java.lang.Override public java.lang.String getInstanceName() { java.lang.Object ref = instanceName_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceName_ = s; return s; } } /** * * * <pre> * The user-friendly name of the Compute Engine instance. * </pre> * * <code>string instance_name = 1;</code> * * @return The bytes for instanceName. 
*/ @java.lang.Override public com.google.protobuf.ByteString getInstanceNameBytes() { java.lang.Object ref = instanceName_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int INSTANCE_ID_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object instanceId_ = ""; /** * * * <pre> * The unique identifier of the Compute Engine instance. * </pre> * * <code>string instance_id = 2;</code> * * @return The instanceId. */ @java.lang.Override public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } } /** * * * <pre> * The unique identifier of the Compute Engine instance. * </pre> * * <code>string instance_id = 2;</code> * * @return The bytes for instanceId. */ @java.lang.Override public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PUBLIC_KEY_FIELD_NUMBER = 3; @SuppressWarnings("serial") private volatile java.lang.Object publicKey_ = ""; /** * * * <pre> * The public RSA key used for sharing data with this instance. * </pre> * * <code>string public_key = 3;</code> * * @return The publicKey. 
*/ @java.lang.Override public java.lang.String getPublicKey() { java.lang.Object ref = publicKey_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); publicKey_ = s; return s; } } /** * * * <pre> * The public RSA key used for sharing data with this instance. * </pre> * * <code>string public_key = 3;</code> * * @return The bytes for publicKey. */ @java.lang.Override public com.google.protobuf.ByteString getPublicKeyBytes() { java.lang.Object ref = publicKey_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); publicKey_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PUBLIC_ECIES_KEY_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object publicEciesKey_ = ""; /** * * * <pre> * The public ECIES key used for sharing data with this instance. * </pre> * * <code>string public_ecies_key = 4;</code> * * @return The publicEciesKey. */ @java.lang.Override public java.lang.String getPublicEciesKey() { java.lang.Object ref = publicEciesKey_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); publicEciesKey_ = s; return s; } } /** * * * <pre> * The public ECIES key used for sharing data with this instance. * </pre> * * <code>string public_ecies_key = 4;</code> * * @return The bytes for publicEciesKey. 
*/ @java.lang.Override public com.google.protobuf.ByteString getPublicEciesKeyBytes() { java.lang.Object ref = publicEciesKey_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); publicEciesKey_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceName_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, instanceName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, instanceId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(publicKey_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 3, publicKey_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(publicEciesKey_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, publicEciesKey_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceName_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, instanceName_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(instanceId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, instanceId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(publicKey_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(3, publicKey_); } if 
(!com.google.protobuf.GeneratedMessageV3.isStringEmpty(publicEciesKey_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, publicEciesKey_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.dataproc.v1.InstanceReference)) { return super.equals(obj); } com.google.cloud.dataproc.v1.InstanceReference other = (com.google.cloud.dataproc.v1.InstanceReference) obj; if (!getInstanceName().equals(other.getInstanceName())) return false; if (!getInstanceId().equals(other.getInstanceId())) return false; if (!getPublicKey().equals(other.getPublicKey())) return false; if (!getPublicEciesKey().equals(other.getPublicEciesKey())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + INSTANCE_NAME_FIELD_NUMBER; hash = (53 * hash) + getInstanceName().hashCode(); hash = (37 * hash) + INSTANCE_ID_FIELD_NUMBER; hash = (53 * hash) + getInstanceId().hashCode(); hash = (37 * hash) + PUBLIC_KEY_FIELD_NUMBER; hash = (53 * hash) + getPublicKey().hashCode(); hash = (37 * hash) + PUBLIC_ECIES_KEY_FIELD_NUMBER; hash = (53 * hash) + getPublicEciesKey().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.dataproc.v1.InstanceReference parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataproc.v1.InstanceReference parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return 
PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataproc.v1.InstanceReference parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataproc.v1.InstanceReference parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataproc.v1.InstanceReference parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.dataproc.v1.InstanceReference parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.dataproc.v1.InstanceReference parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataproc.v1.InstanceReference parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.dataproc.v1.InstanceReference parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.dataproc.v1.InstanceReference parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, 
extensionRegistry); } public static com.google.cloud.dataproc.v1.InstanceReference parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.dataproc.v1.InstanceReference parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.cloud.dataproc.v1.InstanceReference prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * A reference to a Compute Engine instance. 
* </pre> * * Protobuf type {@code google.cloud.dataproc.v1.InstanceReference} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.dataproc.v1.InstanceReference) com.google.cloud.dataproc.v1.InstanceReferenceOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.dataproc.v1.ClustersProto .internal_static_google_cloud_dataproc_v1_InstanceReference_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.dataproc.v1.ClustersProto .internal_static_google_cloud_dataproc_v1_InstanceReference_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.dataproc.v1.InstanceReference.class, com.google.cloud.dataproc.v1.InstanceReference.Builder.class); } // Construct using com.google.cloud.dataproc.v1.InstanceReference.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; instanceName_ = ""; instanceId_ = ""; publicKey_ = ""; publicEciesKey_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.dataproc.v1.ClustersProto .internal_static_google_cloud_dataproc_v1_InstanceReference_descriptor; } @java.lang.Override public com.google.cloud.dataproc.v1.InstanceReference getDefaultInstanceForType() { return com.google.cloud.dataproc.v1.InstanceReference.getDefaultInstance(); } @java.lang.Override public com.google.cloud.dataproc.v1.InstanceReference build() { com.google.cloud.dataproc.v1.InstanceReference result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public 
com.google.cloud.dataproc.v1.InstanceReference buildPartial() { com.google.cloud.dataproc.v1.InstanceReference result = new com.google.cloud.dataproc.v1.InstanceReference(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.cloud.dataproc.v1.InstanceReference result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.instanceName_ = instanceName_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.instanceId_ = instanceId_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.publicKey_ = publicKey_; } if (((from_bitField0_ & 0x00000008) != 0)) { result.publicEciesKey_ = publicEciesKey_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.dataproc.v1.InstanceReference) { return mergeFrom((com.google.cloud.dataproc.v1.InstanceReference) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.dataproc.v1.InstanceReference other) { if (other == 
com.google.cloud.dataproc.v1.InstanceReference.getDefaultInstance()) return this; if (!other.getInstanceName().isEmpty()) { instanceName_ = other.instanceName_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getInstanceId().isEmpty()) { instanceId_ = other.instanceId_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getPublicKey().isEmpty()) { publicKey_ = other.publicKey_; bitField0_ |= 0x00000004; onChanged(); } if (!other.getPublicEciesKey().isEmpty()) { publicEciesKey_ = other.publicEciesKey_; bitField0_ |= 0x00000008; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { instanceName_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { instanceId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 26: { publicKey_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 26 case 34: { publicEciesKey_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000008; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object instanceName_ = ""; /** * * * <pre> * The user-friendly name of the Compute Engine instance. 
* </pre> * * <code>string instance_name = 1;</code> * * @return The instanceName. */ public java.lang.String getInstanceName() { java.lang.Object ref = instanceName_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceName_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The user-friendly name of the Compute Engine instance. * </pre> * * <code>string instance_name = 1;</code> * * @return The bytes for instanceName. */ public com.google.protobuf.ByteString getInstanceNameBytes() { java.lang.Object ref = instanceName_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceName_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The user-friendly name of the Compute Engine instance. * </pre> * * <code>string instance_name = 1;</code> * * @param value The instanceName to set. * @return This builder for chaining. */ public Builder setInstanceName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } instanceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * The user-friendly name of the Compute Engine instance. * </pre> * * <code>string instance_name = 1;</code> * * @return This builder for chaining. */ public Builder clearInstanceName() { instanceName_ = getDefaultInstance().getInstanceName(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * The user-friendly name of the Compute Engine instance. * </pre> * * <code>string instance_name = 1;</code> * * @param value The bytes for instanceName to set. * @return This builder for chaining. 
*/ public Builder setInstanceNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); instanceName_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object instanceId_ = ""; /** * * * <pre> * The unique identifier of the Compute Engine instance. * </pre> * * <code>string instance_id = 2;</code> * * @return The instanceId. */ public java.lang.String getInstanceId() { java.lang.Object ref = instanceId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); instanceId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The unique identifier of the Compute Engine instance. * </pre> * * <code>string instance_id = 2;</code> * * @return The bytes for instanceId. */ public com.google.protobuf.ByteString getInstanceIdBytes() { java.lang.Object ref = instanceId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); instanceId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The unique identifier of the Compute Engine instance. * </pre> * * <code>string instance_id = 2;</code> * * @param value The instanceId to set. * @return This builder for chaining. */ public Builder setInstanceId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } instanceId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The unique identifier of the Compute Engine instance. * </pre> * * <code>string instance_id = 2;</code> * * @return This builder for chaining. 
*/ public Builder clearInstanceId() { instanceId_ = getDefaultInstance().getInstanceId(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * The unique identifier of the Compute Engine instance. * </pre> * * <code>string instance_id = 2;</code> * * @param value The bytes for instanceId to set. * @return This builder for chaining. */ public Builder setInstanceIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); instanceId_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object publicKey_ = ""; /** * * * <pre> * The public RSA key used for sharing data with this instance. * </pre> * * <code>string public_key = 3;</code> * * @return The publicKey. */ public java.lang.String getPublicKey() { java.lang.Object ref = publicKey_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); publicKey_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The public RSA key used for sharing data with this instance. * </pre> * * <code>string public_key = 3;</code> * * @return The bytes for publicKey. */ public com.google.protobuf.ByteString getPublicKeyBytes() { java.lang.Object ref = publicKey_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); publicKey_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The public RSA key used for sharing data with this instance. * </pre> * * <code>string public_key = 3;</code> * * @param value The publicKey to set. * @return This builder for chaining. 
*/ public Builder setPublicKey(java.lang.String value) { if (value == null) { throw new NullPointerException(); } publicKey_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The public RSA key used for sharing data with this instance. * </pre> * * <code>string public_key = 3;</code> * * @return This builder for chaining. */ public Builder clearPublicKey() { publicKey_ = getDefaultInstance().getPublicKey(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The public RSA key used for sharing data with this instance. * </pre> * * <code>string public_key = 3;</code> * * @param value The bytes for publicKey to set. * @return This builder for chaining. */ public Builder setPublicKeyBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); publicKey_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } private java.lang.Object publicEciesKey_ = ""; /** * * * <pre> * The public ECIES key used for sharing data with this instance. * </pre> * * <code>string public_ecies_key = 4;</code> * * @return The publicEciesKey. */ public java.lang.String getPublicEciesKey() { java.lang.Object ref = publicEciesKey_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); publicEciesKey_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The public ECIES key used for sharing data with this instance. * </pre> * * <code>string public_ecies_key = 4;</code> * * @return The bytes for publicEciesKey. 
*/ public com.google.protobuf.ByteString getPublicEciesKeyBytes() { java.lang.Object ref = publicEciesKey_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); publicEciesKey_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The public ECIES key used for sharing data with this instance. * </pre> * * <code>string public_ecies_key = 4;</code> * * @param value The publicEciesKey to set. * @return This builder for chaining. */ public Builder setPublicEciesKey(java.lang.String value) { if (value == null) { throw new NullPointerException(); } publicEciesKey_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } /** * * * <pre> * The public ECIES key used for sharing data with this instance. * </pre> * * <code>string public_ecies_key = 4;</code> * * @return This builder for chaining. */ public Builder clearPublicEciesKey() { publicEciesKey_ = getDefaultInstance().getPublicEciesKey(); bitField0_ = (bitField0_ & ~0x00000008); onChanged(); return this; } /** * * * <pre> * The public ECIES key used for sharing data with this instance. * </pre> * * <code>string public_ecies_key = 4;</code> * * @param value The bytes for publicEciesKey to set. * @return This builder for chaining. 
*/ public Builder setPublicEciesKeyBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); publicEciesKey_ = value; bitField0_ |= 0x00000008; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.dataproc.v1.InstanceReference) } // @@protoc_insertion_point(class_scope:google.cloud.dataproc.v1.InstanceReference) private static final com.google.cloud.dataproc.v1.InstanceReference DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.dataproc.v1.InstanceReference(); } public static com.google.cloud.dataproc.v1.InstanceReference getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<InstanceReference> PARSER = new com.google.protobuf.AbstractParser<InstanceReference>() { @java.lang.Override public InstanceReference parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<InstanceReference> 
parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<InstanceReference> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.dataproc.v1.InstanceReference getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,183
java-iamcredentials/grpc-google-cloud-iamcredentials-v1/src/main/java/com/google/cloud/iam/credentials/v1/IAMCredentialsGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.cloud.iam.credentials.v1; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * A service account is a special type of Google account that belongs to your * application or a virtual machine (VM), instead of to an individual end user. * Your application assumes the identity of the service account to call Google * APIs, so that the users aren't directly involved. * Service account credentials are used to temporarily assume the identity * of the service account. Supported credential types include OAuth 2.0 access * tokens, OpenID Connect ID tokens, self-signed JSON Web Tokens (JWTs), and * more. * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/iam/credentials/v1/iamcredentials.proto") @io.grpc.stub.annotations.GrpcGenerated public final class IAMCredentialsGrpc { private IAMCredentialsGrpc() {} public static final java.lang.String SERVICE_NAME = "google.iam.credentials.v1.IAMCredentials"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest, com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse> getGenerateAccessTokenMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GenerateAccessToken", requestType = com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest.class, responseType = com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest, com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse> getGenerateAccessTokenMethod() { io.grpc.MethodDescriptor< com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest, com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse> getGenerateAccessTokenMethod; if ((getGenerateAccessTokenMethod = IAMCredentialsGrpc.getGenerateAccessTokenMethod) == null) { synchronized (IAMCredentialsGrpc.class) { if ((getGenerateAccessTokenMethod = IAMCredentialsGrpc.getGenerateAccessTokenMethod) == null) { IAMCredentialsGrpc.getGenerateAccessTokenMethod = getGenerateAccessTokenMethod = io.grpc.MethodDescriptor .<com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest, com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName( generateFullMethodName(SERVICE_NAME, "GenerateAccessToken")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse .getDefaultInstance())) .setSchemaDescriptor( new IAMCredentialsMethodDescriptorSupplier("GenerateAccessToken")) .build(); } } } return 
getGenerateAccessTokenMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest, com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse> getGenerateIdTokenMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GenerateIdToken", requestType = com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest.class, responseType = com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest, com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse> getGenerateIdTokenMethod() { io.grpc.MethodDescriptor< com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest, com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse> getGenerateIdTokenMethod; if ((getGenerateIdTokenMethod = IAMCredentialsGrpc.getGenerateIdTokenMethod) == null) { synchronized (IAMCredentialsGrpc.class) { if ((getGenerateIdTokenMethod = IAMCredentialsGrpc.getGenerateIdTokenMethod) == null) { IAMCredentialsGrpc.getGenerateIdTokenMethod = getGenerateIdTokenMethod = io.grpc.MethodDescriptor .<com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest, com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "GenerateIdToken")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse .getDefaultInstance())) .setSchemaDescriptor( new IAMCredentialsMethodDescriptorSupplier("GenerateIdToken")) .build(); } } } return getGenerateIdTokenMethod; } private static volatile io.grpc.MethodDescriptor< 
com.google.cloud.iam.credentials.v1.SignBlobRequest, com.google.cloud.iam.credentials.v1.SignBlobResponse> getSignBlobMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "SignBlob", requestType = com.google.cloud.iam.credentials.v1.SignBlobRequest.class, responseType = com.google.cloud.iam.credentials.v1.SignBlobResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.iam.credentials.v1.SignBlobRequest, com.google.cloud.iam.credentials.v1.SignBlobResponse> getSignBlobMethod() { io.grpc.MethodDescriptor< com.google.cloud.iam.credentials.v1.SignBlobRequest, com.google.cloud.iam.credentials.v1.SignBlobResponse> getSignBlobMethod; if ((getSignBlobMethod = IAMCredentialsGrpc.getSignBlobMethod) == null) { synchronized (IAMCredentialsGrpc.class) { if ((getSignBlobMethod = IAMCredentialsGrpc.getSignBlobMethod) == null) { IAMCredentialsGrpc.getSignBlobMethod = getSignBlobMethod = io.grpc.MethodDescriptor .<com.google.cloud.iam.credentials.v1.SignBlobRequest, com.google.cloud.iam.credentials.v1.SignBlobResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "SignBlob")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.iam.credentials.v1.SignBlobRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.iam.credentials.v1.SignBlobResponse .getDefaultInstance())) .setSchemaDescriptor(new IAMCredentialsMethodDescriptorSupplier("SignBlob")) .build(); } } } return getSignBlobMethod; } private static volatile io.grpc.MethodDescriptor< com.google.cloud.iam.credentials.v1.SignJwtRequest, com.google.cloud.iam.credentials.v1.SignJwtResponse> getSignJwtMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "SignJwt", requestType = 
com.google.cloud.iam.credentials.v1.SignJwtRequest.class, responseType = com.google.cloud.iam.credentials.v1.SignJwtResponse.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.cloud.iam.credentials.v1.SignJwtRequest, com.google.cloud.iam.credentials.v1.SignJwtResponse> getSignJwtMethod() { io.grpc.MethodDescriptor< com.google.cloud.iam.credentials.v1.SignJwtRequest, com.google.cloud.iam.credentials.v1.SignJwtResponse> getSignJwtMethod; if ((getSignJwtMethod = IAMCredentialsGrpc.getSignJwtMethod) == null) { synchronized (IAMCredentialsGrpc.class) { if ((getSignJwtMethod = IAMCredentialsGrpc.getSignJwtMethod) == null) { IAMCredentialsGrpc.getSignJwtMethod = getSignJwtMethod = io.grpc.MethodDescriptor .<com.google.cloud.iam.credentials.v1.SignJwtRequest, com.google.cloud.iam.credentials.v1.SignJwtResponse> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName(generateFullMethodName(SERVICE_NAME, "SignJwt")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.iam.credentials.v1.SignJwtRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.cloud.iam.credentials.v1.SignJwtResponse .getDefaultInstance())) .setSchemaDescriptor(new IAMCredentialsMethodDescriptorSupplier("SignJwt")) .build(); } } } return getSignJwtMethod; } /** Creates a new async stub that supports all call types for the service */ public static IAMCredentialsStub newStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<IAMCredentialsStub> factory = new io.grpc.stub.AbstractStub.StubFactory<IAMCredentialsStub>() { @java.lang.Override public IAMCredentialsStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IAMCredentialsStub(channel, callOptions); } }; return IAMCredentialsStub.newStub(factory, channel); } /** Creates a new blocking-style stub that supports 
all types of calls on the service */ public static IAMCredentialsBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<IAMCredentialsBlockingV2Stub> factory = new io.grpc.stub.AbstractStub.StubFactory<IAMCredentialsBlockingV2Stub>() { @java.lang.Override public IAMCredentialsBlockingV2Stub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IAMCredentialsBlockingV2Stub(channel, callOptions); } }; return IAMCredentialsBlockingV2Stub.newStub(factory, channel); } /** * Creates a new blocking-style stub that supports unary and streaming output calls on the service */ public static IAMCredentialsBlockingStub newBlockingStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<IAMCredentialsBlockingStub> factory = new io.grpc.stub.AbstractStub.StubFactory<IAMCredentialsBlockingStub>() { @java.lang.Override public IAMCredentialsBlockingStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IAMCredentialsBlockingStub(channel, callOptions); } }; return IAMCredentialsBlockingStub.newStub(factory, channel); } /** Creates a new ListenableFuture-style stub that supports unary calls on the service */ public static IAMCredentialsFutureStub newFutureStub(io.grpc.Channel channel) { io.grpc.stub.AbstractStub.StubFactory<IAMCredentialsFutureStub> factory = new io.grpc.stub.AbstractStub.StubFactory<IAMCredentialsFutureStub>() { @java.lang.Override public IAMCredentialsFutureStub newStub( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IAMCredentialsFutureStub(channel, callOptions); } }; return IAMCredentialsFutureStub.newStub(factory, channel); } /** * * * <pre> * A service account is a special type of Google account that belongs to your * application or a virtual machine (VM), instead of to an individual end user. * Your application assumes the identity of the service account to call Google * APIs, so that the users aren't directly involved. 
* Service account credentials are used to temporarily assume the identity * of the service account. Supported credential types include OAuth 2.0 access * tokens, OpenID Connect ID tokens, self-signed JSON Web Tokens (JWTs), and * more. * </pre> */ public interface AsyncService { /** * * * <pre> * Generates an OAuth 2.0 access token for a service account. * </pre> */ default void generateAccessToken( com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest request, io.grpc.stub.StreamObserver<com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getGenerateAccessTokenMethod(), responseObserver); } /** * * * <pre> * Generates an OpenID Connect ID token for a service account. * </pre> */ default void generateIdToken( com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest request, io.grpc.stub.StreamObserver<com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall( getGenerateIdTokenMethod(), responseObserver); } /** * * * <pre> * Signs a blob using a service account's system-managed private key. * </pre> */ default void signBlob( com.google.cloud.iam.credentials.v1.SignBlobRequest request, io.grpc.stub.StreamObserver<com.google.cloud.iam.credentials.v1.SignBlobResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getSignBlobMethod(), responseObserver); } /** * * * <pre> * Signs a JWT using a service account's system-managed private key. * </pre> */ default void signJwt( com.google.cloud.iam.credentials.v1.SignJwtRequest request, io.grpc.stub.StreamObserver<com.google.cloud.iam.credentials.v1.SignJwtResponse> responseObserver) { io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(getSignJwtMethod(), responseObserver); } } /** * Base class for the server implementation of the service IAMCredentials. 
* * <pre> * A service account is a special type of Google account that belongs to your * application or a virtual machine (VM), instead of to an individual end user. * Your application assumes the identity of the service account to call Google * APIs, so that the users aren't directly involved. * Service account credentials are used to temporarily assume the identity * of the service account. Supported credential types include OAuth 2.0 access * tokens, OpenID Connect ID tokens, self-signed JSON Web Tokens (JWTs), and * more. * </pre> */ public abstract static class IAMCredentialsImplBase implements io.grpc.BindableService, AsyncService { @java.lang.Override public final io.grpc.ServerServiceDefinition bindService() { return IAMCredentialsGrpc.bindService(this); } } /** * A stub to allow clients to do asynchronous rpc calls to service IAMCredentials. * * <pre> * A service account is a special type of Google account that belongs to your * application or a virtual machine (VM), instead of to an individual end user. * Your application assumes the identity of the service account to call Google * APIs, so that the users aren't directly involved. * Service account credentials are used to temporarily assume the identity * of the service account. Supported credential types include OAuth 2.0 access * tokens, OpenID Connect ID tokens, self-signed JSON Web Tokens (JWTs), and * more. * </pre> */ public static final class IAMCredentialsStub extends io.grpc.stub.AbstractAsyncStub<IAMCredentialsStub> { private IAMCredentialsStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected IAMCredentialsStub build(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IAMCredentialsStub(channel, callOptions); } /** * * * <pre> * Generates an OAuth 2.0 access token for a service account. 
* </pre> */ public void generateAccessToken( com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest request, io.grpc.stub.StreamObserver<com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGenerateAccessTokenMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Generates an OpenID Connect ID token for a service account. * </pre> */ public void generateIdToken( com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest request, io.grpc.stub.StreamObserver<com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getGenerateIdTokenMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Signs a blob using a service account's system-managed private key. * </pre> */ public void signBlob( com.google.cloud.iam.credentials.v1.SignBlobRequest request, io.grpc.stub.StreamObserver<com.google.cloud.iam.credentials.v1.SignBlobResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getSignBlobMethod(), getCallOptions()), request, responseObserver); } /** * * * <pre> * Signs a JWT using a service account's system-managed private key. * </pre> */ public void signJwt( com.google.cloud.iam.credentials.v1.SignJwtRequest request, io.grpc.stub.StreamObserver<com.google.cloud.iam.credentials.v1.SignJwtResponse> responseObserver) { io.grpc.stub.ClientCalls.asyncUnaryCall( getChannel().newCall(getSignJwtMethod(), getCallOptions()), request, responseObserver); } } /** * A stub to allow clients to do synchronous rpc calls to service IAMCredentials. * * <pre> * A service account is a special type of Google account that belongs to your * application or a virtual machine (VM), instead of to an individual end user. 
* Your application assumes the identity of the service account to call Google * APIs, so that the users aren't directly involved. * Service account credentials are used to temporarily assume the identity * of the service account. Supported credential types include OAuth 2.0 access * tokens, OpenID Connect ID tokens, self-signed JSON Web Tokens (JWTs), and * more. * </pre> */ public static final class IAMCredentialsBlockingV2Stub extends io.grpc.stub.AbstractBlockingStub<IAMCredentialsBlockingV2Stub> { private IAMCredentialsBlockingV2Stub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected IAMCredentialsBlockingV2Stub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IAMCredentialsBlockingV2Stub(channel, callOptions); } /** * * * <pre> * Generates an OAuth 2.0 access token for a service account. * </pre> */ public com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse generateAccessToken( com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGenerateAccessTokenMethod(), getCallOptions(), request); } /** * * * <pre> * Generates an OpenID Connect ID token for a service account. * </pre> */ public com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse generateIdToken( com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGenerateIdTokenMethod(), getCallOptions(), request); } /** * * * <pre> * Signs a blob using a service account's system-managed private key. 
* </pre> */ public com.google.cloud.iam.credentials.v1.SignBlobResponse signBlob( com.google.cloud.iam.credentials.v1.SignBlobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getSignBlobMethod(), getCallOptions(), request); } /** * * * <pre> * Signs a JWT using a service account's system-managed private key. * </pre> */ public com.google.cloud.iam.credentials.v1.SignJwtResponse signJwt( com.google.cloud.iam.credentials.v1.SignJwtRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getSignJwtMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do limited synchronous rpc calls to service IAMCredentials. * * <pre> * A service account is a special type of Google account that belongs to your * application or a virtual machine (VM), instead of to an individual end user. * Your application assumes the identity of the service account to call Google * APIs, so that the users aren't directly involved. * Service account credentials are used to temporarily assume the identity * of the service account. Supported credential types include OAuth 2.0 access * tokens, OpenID Connect ID tokens, self-signed JSON Web Tokens (JWTs), and * more. * </pre> */ public static final class IAMCredentialsBlockingStub extends io.grpc.stub.AbstractBlockingStub<IAMCredentialsBlockingStub> { private IAMCredentialsBlockingStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected IAMCredentialsBlockingStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IAMCredentialsBlockingStub(channel, callOptions); } /** * * * <pre> * Generates an OAuth 2.0 access token for a service account. 
* </pre> */ public com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse generateAccessToken( com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGenerateAccessTokenMethod(), getCallOptions(), request); } /** * * * <pre> * Generates an OpenID Connect ID token for a service account. * </pre> */ public com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse generateIdToken( com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getGenerateIdTokenMethod(), getCallOptions(), request); } /** * * * <pre> * Signs a blob using a service account's system-managed private key. * </pre> */ public com.google.cloud.iam.credentials.v1.SignBlobResponse signBlob( com.google.cloud.iam.credentials.v1.SignBlobRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getSignBlobMethod(), getCallOptions(), request); } /** * * * <pre> * Signs a JWT using a service account's system-managed private key. * </pre> */ public com.google.cloud.iam.credentials.v1.SignJwtResponse signJwt( com.google.cloud.iam.credentials.v1.SignJwtRequest request) { return io.grpc.stub.ClientCalls.blockingUnaryCall( getChannel(), getSignJwtMethod(), getCallOptions(), request); } } /** * A stub to allow clients to do ListenableFuture-style rpc calls to service IAMCredentials. * * <pre> * A service account is a special type of Google account that belongs to your * application or a virtual machine (VM), instead of to an individual end user. * Your application assumes the identity of the service account to call Google * APIs, so that the users aren't directly involved. * Service account credentials are used to temporarily assume the identity * of the service account. Supported credential types include OAuth 2.0 access * tokens, OpenID Connect ID tokens, self-signed JSON Web Tokens (JWTs), and * more. 
* </pre> */ public static final class IAMCredentialsFutureStub extends io.grpc.stub.AbstractFutureStub<IAMCredentialsFutureStub> { private IAMCredentialsFutureStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) { super(channel, callOptions); } @java.lang.Override protected IAMCredentialsFutureStub build( io.grpc.Channel channel, io.grpc.CallOptions callOptions) { return new IAMCredentialsFutureStub(channel, callOptions); } /** * * * <pre> * Generates an OAuth 2.0 access token for a service account. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse> generateAccessToken( com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGenerateAccessTokenMethod(), getCallOptions()), request); } /** * * * <pre> * Generates an OpenID Connect ID token for a service account. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse> generateIdToken(com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getGenerateIdTokenMethod(), getCallOptions()), request); } /** * * * <pre> * Signs a blob using a service account's system-managed private key. * </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.iam.credentials.v1.SignBlobResponse> signBlob(com.google.cloud.iam.credentials.v1.SignBlobRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getSignBlobMethod(), getCallOptions()), request); } /** * * * <pre> * Signs a JWT using a service account's system-managed private key. 
* </pre> */ public com.google.common.util.concurrent.ListenableFuture< com.google.cloud.iam.credentials.v1.SignJwtResponse> signJwt(com.google.cloud.iam.credentials.v1.SignJwtRequest request) { return io.grpc.stub.ClientCalls.futureUnaryCall( getChannel().newCall(getSignJwtMethod(), getCallOptions()), request); } } private static final int METHODID_GENERATE_ACCESS_TOKEN = 0; private static final int METHODID_GENERATE_ID_TOKEN = 1; private static final int METHODID_SIGN_BLOB = 2; private static final int METHODID_SIGN_JWT = 3; private static final class MethodHandlers<Req, Resp> implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>, io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>, io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> { private final AsyncService serviceImpl; private final int methodId; MethodHandlers(AsyncService serviceImpl, int methodId) { this.serviceImpl = serviceImpl; this.methodId = methodId; } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { case METHODID_GENERATE_ACCESS_TOKEN: serviceImpl.generateAccessToken( (com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest) request, (io.grpc.stub.StreamObserver< com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse>) responseObserver); break; case METHODID_GENERATE_ID_TOKEN: serviceImpl.generateIdToken( (com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest) request, (io.grpc.stub.StreamObserver< com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse>) responseObserver); break; case METHODID_SIGN_BLOB: serviceImpl.signBlob( (com.google.cloud.iam.credentials.v1.SignBlobRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.iam.credentials.v1.SignBlobResponse>) responseObserver); break; case METHODID_SIGN_JWT: serviceImpl.signJwt( 
(com.google.cloud.iam.credentials.v1.SignJwtRequest) request, (io.grpc.stub.StreamObserver<com.google.cloud.iam.credentials.v1.SignJwtResponse>) responseObserver); break; default: throw new AssertionError(); } } @java.lang.Override @java.lang.SuppressWarnings("unchecked") public io.grpc.stub.StreamObserver<Req> invoke( io.grpc.stub.StreamObserver<Resp> responseObserver) { switch (methodId) { default: throw new AssertionError(); } } } public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) { return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor()) .addMethod( getGenerateAccessTokenMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.iam.credentials.v1.GenerateAccessTokenRequest, com.google.cloud.iam.credentials.v1.GenerateAccessTokenResponse>( service, METHODID_GENERATE_ACCESS_TOKEN))) .addMethod( getGenerateIdTokenMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.iam.credentials.v1.GenerateIdTokenRequest, com.google.cloud.iam.credentials.v1.GenerateIdTokenResponse>( service, METHODID_GENERATE_ID_TOKEN))) .addMethod( getSignBlobMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.iam.credentials.v1.SignBlobRequest, com.google.cloud.iam.credentials.v1.SignBlobResponse>( service, METHODID_SIGN_BLOB))) .addMethod( getSignJwtMethod(), io.grpc.stub.ServerCalls.asyncUnaryCall( new MethodHandlers< com.google.cloud.iam.credentials.v1.SignJwtRequest, com.google.cloud.iam.credentials.v1.SignJwtResponse>( service, METHODID_SIGN_JWT))) .build(); } private abstract static class IAMCredentialsBaseDescriptorSupplier implements io.grpc.protobuf.ProtoFileDescriptorSupplier, io.grpc.protobuf.ProtoServiceDescriptorSupplier { IAMCredentialsBaseDescriptorSupplier() {} @java.lang.Override public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() { return 
com.google.cloud.iam.credentials.v1.IAMCredentialsProto.getDescriptor(); } @java.lang.Override public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() { return getFileDescriptor().findServiceByName("IAMCredentials"); } } private static final class IAMCredentialsFileDescriptorSupplier extends IAMCredentialsBaseDescriptorSupplier { IAMCredentialsFileDescriptorSupplier() {} } private static final class IAMCredentialsMethodDescriptorSupplier extends IAMCredentialsBaseDescriptorSupplier implements io.grpc.protobuf.ProtoMethodDescriptorSupplier { private final java.lang.String methodName; IAMCredentialsMethodDescriptorSupplier(java.lang.String methodName) { this.methodName = methodName; } @java.lang.Override public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() { return getServiceDescriptor().findMethodByName(methodName); } } private static volatile io.grpc.ServiceDescriptor serviceDescriptor; public static io.grpc.ServiceDescriptor getServiceDescriptor() { io.grpc.ServiceDescriptor result = serviceDescriptor; if (result == null) { synchronized (IAMCredentialsGrpc.class) { result = serviceDescriptor; if (result == null) { serviceDescriptor = result = io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME) .setSchemaDescriptor(new IAMCredentialsFileDescriptorSupplier()) .addMethod(getGenerateAccessTokenMethod()) .addMethod(getGenerateIdTokenMethod()) .addMethod(getSignBlobMethod()) .addMethod(getSignJwtMethod()) .build(); } } } return result; } }
oracle/coherence
35,701
prj/test/functional/repository/src/main/java/repository/AbstractAsyncRepositoryTests.java
/* * Copyright (c) 2000, 2022, Oracle and/or its affiliates. * * Licensed under the Universal Permissive License v 1.0 as shown at * https://oss.oracle.com/licenses/upl. */ package repository; import com.oracle.coherence.repository.AbstractAsyncRepository; import com.oracle.coherence.repository.AbstractRepositoryBase; import com.tangosol.net.AsyncNamedMap; import com.tangosol.net.NamedMap; import com.tangosol.util.Extractors; import com.tangosol.util.Filter; import com.tangosol.util.Fragment; import com.tangosol.util.function.Remote; import com.tangosol.util.stream.RemoteCollectors; import data.pof.Address; import data.repository.Gender; import data.repository.Person; import java.math.BigDecimal; import java.time.LocalDate; import java.util.Collection; import java.util.Collections; import java.util.HashMap; import java.util.HashSet; import java.util.Map; import java.util.Optional; import java.util.Set; import java.util.SortedSet; import java.util.TreeMap; import java.util.concurrent.CountDownLatch; import java.util.concurrent.TimeUnit; import java.util.concurrent.atomic.AtomicInteger; import java.util.stream.Collectors; import java.util.stream.Stream; import org.hamcrest.Matchers; import org.junit.Assert; import org.junit.Before; import org.junit.Test; import static com.tangosol.util.Filters.always; import static com.tangosol.util.Filters.equal; import static com.tangosol.util.Filters.greater; import static com.tangosol.util.Filters.isFalse; import static com.tangosol.util.Filters.isTrue; import static com.tangosol.util.Filters.less; import static data.repository.Gender.FEMALE; import static data.repository.Gender.MALE; import static org.hamcrest.CoreMatchers.is; import static org.hamcrest.CoreMatchers.nullValue; import static org.hamcrest.MatcherAssert.assertThat; import static org.hamcrest.Matchers.contains; import static org.hamcrest.Matchers.containsInAnyOrder; import static org.hamcrest.Matchers.isOneOf; /** * Integration tests for {@link 
AbstractAsyncRepository}. * * @since 21.06 */ public abstract class AbstractAsyncRepositoryTests { protected abstract AsyncNamedMap<String, Person> getMap(); protected abstract AbstractAsyncRepository<String, Person> people(); @Before public void populateRepository() { getMap().clear().join(); people().saveAll(Stream.of( new Person("aleks").name("Aleks") .dateOfBirth(LocalDate.of(1974, 8, 24)) .age(46).gender(MALE).height(79).weight(260.0).salary(BigDecimal.valueOf(5000)), new Person("marija").name("Marija") .dateOfBirth(LocalDate.of(1978, 2, 20)) .age(43).gender(FEMALE).height(66).weight(130.0).salary(BigDecimal.valueOf(10_000)), new Person("ana").name("Ana Maria") .dateOfBirth(LocalDate.of(2004, 8, 14)) .age(16).gender(FEMALE).height(68).weight(120.0).salary(BigDecimal.valueOf(1000)), new Person("nole").name("Novak") .dateOfBirth(LocalDate.of(2007, 12, 28)) .age(13).gender(MALE).height(65).weight(125.0).salary(BigDecimal.valueOf(600)), new Person("kiki").name("Kristina") .dateOfBirth(LocalDate.of(2013, 2, 13)) .age(8).gender(FEMALE).height(50).weight(60.0).salary(BigDecimal.valueOf(400)) )).join(); } @Test public void testSave() { getMap().clear().join(); people().save(new Person("aleks").name("Aleks").age(46)).join(); assertThat(getMap().get("aleks").join().getName(), is("Aleks")); assertThat(getMap().size().join(), is(1)); } @Test public void testSaveArrayOfEntities() { assertThat(getMap().size().join(), is(5)); assertThat(getMap().get("aleks").join().getName(), is("Aleks")); assertThat(getMap().get("marija").join().getName(), is("Marija")); assertThat(getMap().get("ana").join().getName(), is("Ana Maria")); assertThat(getMap().get("nole").join().getName(), is("Novak")); assertThat(getMap().get("kiki").join().getName(), is("Kristina")); } @Test public void testSaveCollectionOfEntities() { getMap().clear().join(); Set<Person> setPeople = new HashSet<>(); setPeople.add(new Person("aleks").name("Aleks").age(46)); setPeople.add(new 
Person("marija").name("Marija").age(43)); setPeople.add(new Person("ana").name("Ana Maria").age(16)); setPeople.add(new Person("nole").name("Novak").age(13)); setPeople.add(new Person("kiki").name("Kristina").age(8)); people().saveAll(setPeople).join(); assertThat(getMap().size().join(), is(5)); assertThat(getMap().get("aleks").join().getName(), is("Aleks")); assertThat(getMap().get("marija").join().getName(), is("Marija")); assertThat(getMap().get("ana").join().getName(), is("Ana Maria")); assertThat(getMap().get("nole").join().getName(), is("Novak")); assertThat(getMap().get("kiki").join().getName(), is("Kristina")); } @Test public void testGet() { assertThat(people().get("kiki").join().getName(), is("Kristina")); } @Test public void testExists() { getMap().clear(); assertThat(people().exists("aleks").join(), is(false)); people().save(new Person("aleks").name("Aleks").age(46)); assertThat(people().exists("aleks").join(), is(true)); } @Test public void testGetAll() { Collection<? extends Person> results = people().getAll().join(); assertThat(results.size(), is(5)); assertThat(results.stream().map(Person::getName).collect(Collectors.toList()), containsInAnyOrder("Kristina", "Novak", "Ana Maria", "Marija", "Aleks")); } @Test public void testGetAllOrdered() { Collection<? extends Person> results = people().getAllOrderedBy(Person::getAge).join(); assertThat(results.size(), is(5)); assertThat(results.stream().map(Person::getName).collect(Collectors.toList()), contains("Kristina", "Novak", "Ana Maria", "Marija", "Aleks")); } @Test public void testGetAllOrderedByComparator() { Collection<? extends Person> results = people().getAllOrderedBy(Remote.comparator(Person::getAge).reversed()).join(); assertThat(results.size(), is(5)); assertThat(results.stream().map(Person::getName).collect(Collectors.toList()), contains("Aleks", "Marija", "Ana Maria", "Novak", "Kristina")); } @Test public void testGetAllFiltered() { Collection<? 
extends Person> results = people().getAll(less(Person::getAge, 10)).join(); assertThat(results.size(), is(1)); assertThat(results.iterator().next().getName(), is("Kristina")); } @Test public void testGetAllFilteredOrdered() { Collection<? extends Person> results = people() .getAllOrderedBy(greater(Person::getAge, 10), Person::getName).join(); assertThat(results.size(), is(4)); assertThat(results.stream().map(Person::getName).collect(Collectors.toList()), contains("Aleks", "Ana Maria", "Marija", "Novak")); } @Test public void testGetSingleAttribute() { assertThat(people().get("nole", Person::getAge).join(), is(13)); } @Test public void testGetFragment() { Fragment<Person> fragment = people().get("nole", Extractors.fragment(Person::getName, Person::getAge)).join(); assertThat(fragment.get(Person::getName), is("Novak")); assertThat(fragment.get(Person::getAge), is(13)); } @Test public void testGetAllSingleAttribute() { Map<String, Integer> ages = people().getAll(Person::getAge).join(); assertThat(ages.get("aleks"), is(46)); assertThat(ages.get("marija"), is(43)); assertThat(ages.get("ana"), is(16)); assertThat(ages.get("nole"), is(13)); assertThat(ages.get("kiki"), is(8)); } @Test public void testGetAllSingleAttributeByIds() { Map<String, Integer> ages = people().getAll(Collections.singleton("kiki"), Person::getAge).join(); assertThat(ages.size(), is(1)); assertThat(ages.get("kiki"), is(8)); } @Test public void testGetAllFragment() { Map<String, Fragment<Person>> map = people().getAll(Extractors.fragment(Person::getName, Person::getAge)).join(); assertThat(map.get("aleks").get(Person::getAge), is(46)); assertThat(map.get("marija").get(Person::getAge), is(43)); assertThat(map.get("ana").get(Person::getAge), is(16)); assertThat(map.get("nole").get(Person::getAge), is(13)); assertThat(map.get("kiki").get(Person::getAge), is(8)); } @Test public void testGetAllFragmentById() { Map<String, Fragment<Person>> map = people().getAll(Collections.singleton("ana"), 
Extractors.fragment(Person::getName, Person::getAge)).join(); assertThat(map.size(), is(1)); assertThat(map.get("ana").get(Person::getAge), is(16)); } @Test public void testGetAllExtractorAndBiConsumer() { Map<String, Integer> collected = new HashMap<>(); people().getAll(Person::getAge, collected::put).join(); assertThat(collected.size(), is(5)); collected.forEach((id, age) -> assertThat(age, is(getNamedMap().get(id).getAge()))); } @Test public void testGetAllFilterWithExtractorAndBiConsumer() { Map<String, Integer> collected = new HashMap<>(); people().getAll(less(Person::getAge, 10), Person::getAge, collected::put).join(); assertThat(collected.size(), is(1)); collected.forEach((id, age) -> assertThat(age, is(getNamedMap().get(id).getAge()))); } @Test public void testUpdate() { people().update("kiki", Person::setWeight, 65.0).join(); assertThat(getMap().get("kiki").join().getWeight(), is(65.0)); } @Test public void testUpdateWithBiFunction() { Person p = people().update("kiki", Person::weight, 65.0).join(); assertThat(getMap().get("kiki").join(), is(p)); assertThat(p.getWeight(), is(65.0)); } @Test public void testUpdateWithFunction() { Person kiki = people().update("kiki", p -> { p.setName(p.getName().toUpperCase()); p.setGender(FEMALE); p.setWeight(65.0); return p; }).join(); assertThat(getMap().get("kiki").join(), is(kiki)); assertThat(kiki.getWeight(), is(65.0)); assertThat(kiki.getGender(), is(FEMALE)); assertThat(kiki.getName(), is("KRISTINA")); } @Test public void testUpdateWithFactory() { people().update("mike", Person::setName, "Michael", Person::new).join(); assertThat(getMap().get("mike").join().getName(), is("Michael")); } @Test public void testUpdateWithBiFunctionAndFactory() { Person p = people().update("mike", Person::name, "Michael", Person::new).join(); assertThat(getMap().get("mike").join(), is(p)); assertThat(p.getName(), is("Michael")); } @Test public void testUpdateWithFunctionAndFactory() { Person mike = people().update("mike", p -> { 
p.setName("Michael"); p.setGender(MALE); p.setWeight(220.0); return p; }, Person::new).join(); assertThat(getMap().get("mike").join(), is(mike)); assertThat(mike.getWeight(), is(220.0)); assertThat(mike.getGender(), is(MALE)); assertThat(mike.getName(), is("Michael")); } @Test public void testUpdateAll() { people().updateAll(always(), Person::setAddress, new Address("123 Main St", "Tampa", "FL", "33555")).join(); assertThat(getMap().get("aleks").join().getAddress().getCity(), is("Tampa")); assertThat(getMap().get("marija").join().getAddress().getCity(), is("Tampa")); assertThat(getMap().get("ana").join().getAddress().getCity(), is("Tampa")); assertThat(getMap().get("nole").join().getAddress().getCity(), is("Tampa")); assertThat(getMap().get("kiki").join().getAddress().getCity(), is("Tampa")); } @Test public void testUpdateAllWithBiFunction() { Map<String, Person> adults = people().updateAll(isTrue(Person::isAdult), Person::salary, BigDecimal.valueOf(1000.0)).join(); assertThat(adults.size(), is(2)); assertThat(getMap().get("aleks").join().getSalary(), is(BigDecimal.valueOf(1000.0))); assertThat(adults.get("aleks").getSalary(), is(BigDecimal.valueOf(1000.0))); assertThat(getMap().get("marija").join().getSalary(), is(BigDecimal.valueOf(1000.0))); assertThat(adults.get("marija").getSalary(), is(BigDecimal.valueOf(1000.0))); } @Test public void testUpdateAllWithFunction() { Map<String, Person> kids = people().updateAll(isFalse(Person::isAdult), p -> { p.setName(p.getName().toUpperCase()); p.setSalary(BigDecimal.ZERO); return p; }).join(); assertThat(kids.size(), is(3)); assertThat(getMap().get("ana").join(), is(kids.get("ana"))); assertThat(getMap().get("nole").join(), is(kids.get("nole"))); assertThat(getMap().get("kiki").join(), is(kids.get("kiki"))); assertThat(kids.get("ana").getName(), is("ANA MARIA")); assertThat(kids.get("nole").getName(), is("NOVAK")); assertThat(kids.get("kiki").getName(), is("KRISTINA")); } @Test public void testRemoveById() { 
assertThat(people().removeById("aleks").join(), is(true)); assertThat(people().removeById("aleks").join(), is(false)); assertThat(people().count().join(), is(4L)); assertThat(people().removeById("marija", true).join().getName(), is("Marija")); assertThat(people().removeById("ana", false).join(), nullValue()); assertThat(people().count().join(), is(2L)); assertThat(people().removeById("ana", true).join(), nullValue()); assertThat(people().count().join(), is(2L)); } @Test public void testRemove() { Person aleks = getMap().get("aleks").join(); Person marija = getMap().get("marija").join(); Person ana = getMap().get("ana").join(); assertThat(people().remove(aleks).join(), is(true)); assertThat(people().remove(aleks).join(), is(false)); assertThat(people().count().join(), is(4L)); assertThat(people().remove(marija, true).join().getName(), is("Marija")); assertThat(people().remove(ana, false).join(), nullValue()); assertThat(people().count().join(), is(2L)); assertThat(people().remove(ana, true).join(), nullValue()); assertThat(people().count().join(), is(2L)); } @Test public void testRemoveAll() { Person aleks = getMap().get("aleks").join(); Person marija = getMap().get("marija").join(); Person ana = getMap().get("ana").join(); Person nole = getMap().get("nole").join(); assertThat(people().removeAll(Stream.of(aleks, marija)).join(), is(true)); assertThat(people().removeAll(Stream.of(aleks, marija)).join(), is(false)); assertThat(people().count().join(), is(3L)); Map<String, Person> map = people().removeAll(setOf(aleks, marija, ana, nole), true).join(); assertThat(map.get("aleks"), nullValue()); assertThat(map.get("marija"), nullValue()); assertThat(map.get("ana"), is(ana)); assertThat(map.get("nole"), is(nole)); assertThat(people().count().join(), is(1L)); } @Test public void testRemoveAllFilter() { Person ana = getMap().get("ana").join(); Person nole = getMap().get("nole").join(); assertThat(people().removeAll(isTrue(Person::isAdult)).join(), is(true)); 
assertThat(people().removeAll(isTrue(Person::isAdult)).join(), is(false)); assertThat(people().count().join(), is(3L)); Map<String, Person> map = people().removeAll(greater(Person::getAge, 10), true).join(); assertThat(map.size(), is(2)); assertThat(map.get("ana"), is(ana)); assertThat(map.get("nole"), is(nole)); assertThat(people().count().join(), is(1L)); } @Test public void testCount() { assertThat(people().count().join(), is(5L)); assertThat(people().count(isTrue(Person::isAdult)).join(), is(2L)); assertThat(people().count(isFalse(Person::isAdult)).join(), is(3L)); } @Test public void testMax() { assertThat(people().max(Person::getAge).join(), is(46)); assertThat(people().max(Person::getHeight).join(), is(79L)); assertThat(people().max(Person::getWeight).join(), is(260.0)); assertThat(people().max(Person::getSalary).join(), is(BigDecimal.valueOf(10_000))); assertThat(people().max(Person::getDateOfBirth).join(), is(LocalDate.of(2013, 2, 13))); } @Test public void testMaxFilter() { Filter<Person> filter = equal(Person::getGender, FEMALE); assertThat(people().max(filter, Person::getAge).join(), is(43)); assertThat(people().max(filter, Person::getHeight).join(), is(68L)); assertThat(people().max(filter, Person::getWeight).join(), is(130.0)); assertThat(people().max(filter, Person::getSalary).join(), is(BigDecimal.valueOf(10_000))); assertThat(people().max(filter, Person::getDateOfBirth).join(), is(LocalDate.of(2013, 2, 13))); } @Test @SuppressWarnings("OptionalGetWithoutIsPresent") public void testMaxBy() { assertThat(people().maxBy(Person::getAge).join().get(), is(getMap().get("aleks").join())); assertThat(people().maxBy(Person::getSalary).join().get(), is(getMap().get("marija").join())); } @Test @SuppressWarnings("OptionalGetWithoutIsPresent") public void testMaxByFilter() { Filter<Person> filter = isFalse(Person::isAdult); assertThat(people().maxBy(filter, Person::getAge).join().get(), is(getMap().get("ana").join())); assertThat(people().maxBy(filter, 
Person::getWeight).join().get(), is(getMap().get("nole").join()));
    }

    // ---- aggregation tests ------------------------------------------------
    //
    // NOTE(review): the expected values below (ages 8/13/16/43/46, heights,
    // weights, salaries, dates of birth) assume the fixed five-person data set
    // ("aleks", "marija", "ana", "nole", "kiki") installed by
    // populateRepository() elsewhere in this class — confirm against that method.

    // min() over each comparable property type (int, long, double, BigDecimal, LocalDate).
    @Test
    public void testMin() {
        assertThat(people().min(Person::getAge).join(), is(8));
        assertThat(people().min(Person::getHeight).join(), is(50L));
        assertThat(people().min(Person::getWeight).join(), is(60.0));
        assertThat(people().min(Person::getSalary).join(), is(BigDecimal.valueOf(400)));
        assertThat(people().min(Person::getDateOfBirth).join(), is(LocalDate.of(1974, 8, 24)));
    }

    // min() restricted to entries matching a filter (males only).
    @Test
    public void testMinFilter() {
        Filter<Person> filter = equal(Person::getGender, MALE);
        assertThat(people().min(filter, Person::getAge).join(), is(13));
        assertThat(people().min(filter, Person::getHeight).join(), is(65L));
        assertThat(people().min(filter, Person::getWeight).join(), is(125.0));
        assertThat(people().min(filter, Person::getSalary).join(), is(BigDecimal.valueOf(600)));
        assertThat(people().min(filter, Person::getDateOfBirth).join(), is(LocalDate.of(1974, 8, 24)));
    }

    // minBy() returns the entity holding the minimum, not the minimum value itself.
    @Test
    @SuppressWarnings("OptionalGetWithoutIsPresent")
    public void testMinBy() {
        assertThat(people().minBy(Person::getAge).join().get(), is(getMap().get("kiki").join()));
        assertThat(people().minBy(Person::getDateOfBirth).join().get(), is(getMap().get("aleks").join()));
    }

    // minBy() with a filter (adults only).
    @Test
    @SuppressWarnings("OptionalGetWithoutIsPresent")
    public void testMinByFilter() {
        Filter<Person> filter = isTrue(Person::isAdult);
        assertThat(people().minBy(filter, Person::getAge).join().get(), is(getMap().get("marija").join()));
        assertThat(people().minBy(filter, Person::getWeight).join().get(), is(getMap().get("marija").join()));
    }

    // sum() over numeric properties; int/long sums widen to long.
    @Test
    public void testSum() {
        assertThat(people().sum(Person::getAge).join(), is(126L));
        assertThat(people().sum(Person::getHeight).join(), is(328L));
        assertThat(people().sum(Person::getWeight).join(), is(695.0));
        assertThat(people().sum(Person::getSalary).join(), is(BigDecimal.valueOf(17_000)));
    }

    // sum() restricted to adults.
    @Test
    public void testSumFilter() {
        Filter<Person> filter = isTrue(Person::isAdult);
        assertThat(people().sum(filter, Person::getAge).join(), is(89L));
        assertThat(people().sum(filter, Person::getHeight).join(), is(145L));
        assertThat(people().sum(filter, Person::getWeight).join(), is(390.0));
        assertThat(people().sum(filter, Person::getSalary).join(), is(BigDecimal.valueOf(15_000)));
    }

    // average() — note the BigDecimal average carries an 8-decimal scale.
    @Test
    public void testAverage() {
        assertThat(people().average(Person::getAge).join(), is(25.2));
        assertThat(people().average(Person::getHeight).join(), is(65.6));
        assertThat(people().average(Person::getWeight).join(), is(139.0));
        assertThat(people().average(Person::getSalary).join(), is(new BigDecimal("3400.00000000")));
    }

    // average() restricted to adults.
    @Test
    public void testAverageFilter() {
        Filter<Person> filter = isTrue(Person::isAdult);
        assertThat(people().average(filter, Person::getAge).join(), is(44.5));
        assertThat(people().average(filter, Person::getHeight).join(), is(72.5));
        assertThat(people().average(filter, Person::getWeight).join(), is(195.0));
        assertThat(people().average(filter, Person::getSalary).join(), is(new BigDecimal("7500.00000000")));
    }

    // distinct() extracts the set of unique property values, with and without a filter.
    @Test
    public void testDistinct() {
        assertThat(people().distinct(Person::getName).join(),
                   containsInAnyOrder("Aleks", "Marija", "Ana Maria", "Novak", "Kristina"));
        assertThat(people().distinct(isTrue(Person::isAdult), Person::getName).join(),
                   containsInAnyOrder("Aleks", "Marija"));
    }

    // groupBy() with default (unordered) Set values.
    @Test
    public void testGroupBy() {
        Map<Gender, Set<Person>> map = people().groupBy(Person::getGender).join();
        assertThat(map.size(), is(2));
        assertThat(map.get(MALE), Matchers.containsInAnyOrder(getMap().get("aleks").join(), getMap().get("nole").join()));
        assertThat(map.get(FEMALE), Matchers.containsInAnyOrder(getMap().get("marija").join(), getMap().get("ana").join(), getMap().get("kiki").join()));
    }

    // groupBy() applied only to entries passing the filter (adults).
    @Test
    public void testGroupByFiltered() {
        Map<Gender, Set<Person>> map = people().groupBy(isTrue(Person::isAdult), Person::getGender).join();
        assertThat(map.size(), is(2));
        assertThat(map.get(MALE), containsInAnyOrder(getMap().get("aleks").join()));
        assertThat(map.get(FEMALE), containsInAnyOrder(getMap().get("marija").join()));
    }

    // groupBy() with a comparator yields SortedSet values (ordered by age here).
    @Test
    public void testGroupByOrdered() {
        Map<Gender, SortedSet<Person>> map = people().groupBy(Person::getGender, Remote.comparator(Person::getAge)).join();
        assertThat(map.size(), is(2));
        assertThat(map.get(MALE), Matchers.contains(getMap().get("nole").join(), getMap().get("aleks").join()));
        assertThat(map.get(FEMALE), Matchers.contains(getMap().get("kiki").join(), getMap().get("ana").join(), getMap().get("marija").join()));
    }

    // groupBy() combining a filter (minors) with a reversed age ordering.
    @Test
    public void testGroupByOrderedFiltered() {
        Map<Gender, SortedSet<Person>> map = people().groupBy(isFalse(Person::isAdult), Person::getGender, Remote.comparator(Person::getAge).reversed()).join();
        assertThat(map.size(), is(2));
        assertThat(map.get(MALE), contains(getMap().get("nole").join()));
        assertThat(map.get(FEMALE), Matchers.contains(getMap().get("ana").join(), getMap().get("kiki").join()));
    }

    // groupBy() with a downstream collector (sum of ages per gender).
    @Test
    public void testGroupByCollector() {
        Map<Gender, Long> map = people().groupBy(Person::getGender, RemoteCollectors.summingLong(Person::getAge)).join();
        assertThat(map.size(), is(2));
        assertThat(map.get(MALE), is(59L));
        assertThat(map.get(FEMALE), is(67L));
    }

    // Filter + downstream collector: oldest person per adult/minor partition, under 200.0 weight.
    @SuppressWarnings("OptionalGetWithoutIsPresent")
    @Test
    public void testGroupByCollectorFiltered() throws Throwable {
        Map<Boolean, Optional<Person>> map = people().groupBy(less(Person::getWeight, 200.0), Person::isAdult, RemoteCollectors.maxBy(Person::getAge)).join();
        assertThat(map.size(), is(2));
        assertThat(map.get(true).get(), is(getMap().get("marija").join()));
        assertThat(map.get(false).get(), is(getMap().get("ana").join()));
    }

    // Supplying TreeMap::new makes the result map sorted by key.
    @Test
    public void testGroupByCollectorWithSuppliedMap() {
        Map<Integer, Long> map = people().groupBy(Person::getAge, TreeMap::new, RemoteCollectors.counting()).join();
        assertThat(map.size(), is(5));
        assertThat(map.keySet(), contains(8, 13, 16, 43, 46));
        assertThat(map.values(), contains(1L, 1L, 1L, 1L, 1L));
    }

    // As above, restricted to minors.
    @Test
    public void testGroupByCollectorWithSuppliedMapFiltered() {
        Map<Integer, Long> map = people().groupBy(isFalse(Person::isAdult), Person::getAge,
                TreeMap::new, RemoteCollectors.counting()).join();
        assertThat(map.size(), is(3));
        assertThat(map.keySet(), contains(8, 13, 16));
        assertThat(map.values(), contains(1L, 1L, 1L));
    }

    // top() returns the N largest property values; a reverse comparator yields the N smallest.
    @Test
    public void testTop() {
        assertThat(people().top(Person::getAge, 2).join(), contains(46, 43));
        assertThat(people().top(Person::getAge, Remote.Comparator.reverseOrder(), 2).join(), contains(8, 13));
    }

    // top() with filters (minors / adults).
    @Test
    public void testTopFilter() {
        assertThat(people().top(isFalse(Person::isAdult), Person::getAge, 2).join(), contains(16, 13));
        assertThat(people().top(isTrue(Person::isAdult), Person::getAge, Remote.Comparator.reverseOrder(), 2).join(), contains(43, 46));
    }

    // topBy() returns the entities rather than the extracted values.
    @Test
    public void testTopBy() {
        assertThat(people().topBy(Person::getAge, 2).join(), Matchers.contains(getMap().get("aleks").join(), getMap().get("marija").join()));
        assertThat(people().topBy(Remote.Comparator.comparingInt(Person::getAge).reversed(), 2).join(), Matchers.contains(getMap().get("kiki").join(), getMap().get("nole").join()));
    }

    // topBy() with filters and explicit comparators.
    @Test
    public void testTopByFilter() {
        assertThat(people().topBy(isFalse(Person::isAdult), Person::getAge, 2).join(), Matchers.contains(getMap().get("ana").join(), getMap().get("nole").join()));
        assertThat(people().topBy(isTrue(Person::isAdult), Remote.Comparator.comparingInt(Person::getAge).reversed(), 2).join(), Matchers.contains(getMap().get("marija").join(), getMap().get("aleks").join()));
    }

    // ---- listener tests ---------------------------------------------------

    /**
     * A listener registered for a single key ("aleks") must observe exactly one
     * insert, one update and one remove for that key, and nothing after it has
     * been removed. The latches guard against event-delivery races; the final
     * Thread.sleep gives any spurious late events a chance to arrive before the
     * counters are checked.
     */
    @Test
    public void testKeyListener() throws InterruptedException {
        getMap().clear().join();

        AtomicInteger cInsert = new AtomicInteger(0);
        AtomicInteger cUpdate = new AtomicInteger(0);
        AtomicInteger cRemove = new AtomicInteger(0);
        CountDownLatch insert = new CountDownLatch(1);
        CountDownLatch update = new CountDownLatch(1);
        CountDownLatch remove = new CountDownLatch(1);

        AbstractRepositoryBase.Listener<Person> listener = people().listener()
                .onInsert(person -> {
                    insert.countDown();
                    cInsert.incrementAndGet();
                    assertThat(person.getName(), is("Aleks"));
                })
                .onUpdate(person -> {
                    update.countDown();
                    cUpdate.incrementAndGet();
                    // updateAll below upper-cases the name, so the new value is expected here
                    assertThat(person.getName(), is("ALEKS"));
                })
                .onRemove(person -> {
                    remove.countDown();
                    cRemove.incrementAndGet();
                    assertThat(person.getName(), is("ALEKS"));
                })
                .build();

        people().addListener("aleks", listener);
        populateRepository();
        if (!insert.await(1, TimeUnit.SECONDS)) {
            Assert.fail("Didn't receive insert event");
        }
        people().updateAll(isTrue(Person::isAdult), p -> {
            p.setName(p.getName().toUpperCase());
            return null;
        });
        if (!update.await(1, TimeUnit.SECONDS)) {
            Assert.fail("Didn't receive update event");
        }
        people().removeById("aleks");
        if (!remove.await(1, TimeUnit.SECONDS)) {
            Assert.fail("Didn't receive remove event");
        }

        // after removal of the listener this mutation must NOT be observed
        people().removeListener("aleks", listener);
        people().update("aleks", Person::setName, "Aleks", Person::new);
        Thread.sleep(500L);

        assertThat(cInsert.get(), is(1));
        assertThat(cUpdate.get(), is(1));
        assertThat(cRemove.get(), is(1));
    }

    /**
     * A listener registered with a filter (adults only) must observe events for
     * the two adults ("Aleks", "Marija") and ignore everyone else. Note that the
     * remove latch/counter expects only ONE event: of the two removeById calls
     * below, only "aleks" matches the adult filter.
     */
    @Test
    public void testFilterListener() throws InterruptedException {
        getMap().clear().join();

        AtomicInteger cInsert = new AtomicInteger(0);
        AtomicInteger cUpdate = new AtomicInteger(0);
        AtomicInteger cRemove = new AtomicInteger(0);
        CountDownLatch insert = new CountDownLatch(2);
        CountDownLatch update = new CountDownLatch(2);
        CountDownLatch remove = new CountDownLatch(1);

        AbstractRepositoryBase.Listener<Person> listener = people().listener()
                .onInsert(person -> {
                    insert.countDown();
                    cInsert.incrementAndGet();
                    assertThat(person.getName(), isOneOf("Aleks", "Marija"));
                })
                .onUpdate((personOld, personNew) -> {
                    update.countDown();
                    cUpdate.incrementAndGet();
                    assertThat(personOld.getName(), isOneOf("Aleks", "Marija"));
                    assertThat(personNew.getName(), isOneOf("ALEKS", "MARIJA"));
                })
                .onRemove(person -> {
                    remove.countDown();
                    cRemove.incrementAndGet();
                    assertThat(person.getName(), isOneOf("ALEKS", "MARIJA"));
                })
                .build();

        people().addListener(isTrue(Person::isAdult), listener);
        populateRepository();
        if (!insert.await(1, TimeUnit.SECONDS)) {
            Assert.fail("Didn't receive insert events");
        }
        people().updateAll(always(), p -> {
            p.setName(p.getName().toUpperCase());
            return null;
        });
        if (!update.await(1, TimeUnit.SECONDS)) {
            Assert.fail("Didn't receive update events");
        }
        people().removeById("aleks");
        people().removeById("ana");
        if (!remove.await(1, TimeUnit.SECONDS)) {
            Assert.fail("Didn't receive remove event");
        }

        // after removal of the listener this mutation must NOT be observed
        people().removeListener(isTrue(Person::isAdult), listener);
        people().update("aleks", Person::setName, "Aleks", Person::new);
        Thread.sleep(500L);

        assertThat(cInsert.get(), is(2));
        assertThat(cUpdate.get(), is(2));
        assertThat(cRemove.get(), is(1));
    }

    /**
     * An unscoped (global) listener must observe all five inserts, updates and
     * removes. The commented-out assertions below were disabled upstream
     * (presumably because old-value delivery differs for the async repository —
     * TODO confirm); they are kept verbatim for reference.
     */
    @Test
    public void testGlobalListener() throws InterruptedException {
        getMap().clear().join();

        AtomicInteger cInsert = new AtomicInteger(0);
        AtomicInteger cUpdate = new AtomicInteger(0);
        AtomicInteger cRemove = new AtomicInteger(0);
        CountDownLatch insert = new CountDownLatch(5);
        CountDownLatch update = new CountDownLatch(5);
        CountDownLatch remove = new CountDownLatch(5);

        AbstractRepositoryBase.Listener<Person> listener = people().listener()
                .onInsert(person -> {
                    insert.countDown();
                    cInsert.incrementAndGet();
                    assertThat(person.getName(), isOneOf("Aleks", "Marija", "Ana Maria", "Novak", "Kristina"));
                })
                .onUpdate((personOld, personNew) -> {
                    update.countDown();
                    cUpdate.incrementAndGet();
                    //if (!(this instanceof DefaultAsyncRepositoryTest))
                    //    {
                    assertThat(personOld.getName(), isOneOf("Aleks", "Marija", "Ana Maria", "Novak", "Kristina"));
                    //    }
                    //assertThat(personNew.getName(), isOneOf("ALEKS", "MARIJA", "ANA MARIA", "NOVAK", "KRISTINA"));
                })
                .onRemove(person -> {
                    remove.countDown();
                    cRemove.incrementAndGet();
                    assertThat(person.getName(), isOneOf("ALEKS", "MARIJA", "ANA MARIA", "NOVAK", "KRISTINA"));
                })
                .build();

        people().addListener(listener);
        populateRepository();
        if (!insert.await(1, TimeUnit.SECONDS)) {
            Assert.fail("Didn't receive insert events");
        }
        people().updateAll(always(), p -> {
            p.setName(p.getName().toUpperCase());
            return null;
        });
        if (!update.await(1, TimeUnit.SECONDS)) {
            Assert.fail("Didn't receive update events");
        }
        people().removeAll(always());
        if (!remove.await(1, TimeUnit.SECONDS)) {
            Assert.fail("Didn't receive remove event");
        }

        // after removal of the listener this mutation must NOT be observed
        people().removeListener(listener);
        people().update("aleks", Person::setName, "Aleks", Person::new);
        Thread.sleep(500L);

        assertThat(cInsert.get(), is(5));
        assertThat(cUpdate.get(), is(5));
        assertThat(cRemove.get(), is(5));
    }

    // ---- helpers ---------------------------------------------------------

    // Convenience factory: mutable Set from varargs.
    @SafeVarargs
    private static <T> Set<T> setOf(T... values) {
        return Stream.of(values).collect(Collectors.toSet());
    }

    // Exposes the underlying NamedMap backing this repository.
    protected NamedMap<String, Person> getNamedMap() {
        return getMap().getNamedMap();
    }
}
google/j2objc
36,074
jre_emul/android/platform/libcore/harmony-tests/src/test/java/org/apache/harmony/tests/java/util/ArrayDequeTest.java
/*
 * Licensed to the Apache Software Foundation (ASF) under one or more
 * contributor license agreements.  See the NOTICE file distributed with
 * this work for additional information regarding copyright ownership.
 * The ASF licenses this file to You under the Apache License, Version 2.0
 * (the "License"); you may not use this file except in compliance with
 * the License.  You may obtain a copy of the License at
 *
 *     http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software
 * distributed under the License is distributed on an "AS IS" BASIS,
 * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 * See the License for the specific language governing permissions and
 * limitations under the License.
 */

package org.apache.harmony.tests.java.util;

import java.io.Serializable;
import java.util.ArrayDeque;
import java.util.ArrayList;
import java.util.Arrays;
import java.util.ConcurrentModificationException;
import java.util.Iterator;
import java.util.LinkedList;
import java.util.NoSuchElementException;
import java.util.Spliterator;

import junit.framework.TestCase;

import libcore.java.util.ForEachRemainingTester;
import libcore.java.util.SpliteratorTester;

import org.apache.harmony.testframework.serialization.SerializationTest;
import org.apache.harmony.testframework.serialization.SerializationTest.SerializableAssert;

/**
 * Conformance tests for {@link java.util.ArrayDeque}, exercising the deque,
 * queue and stack views of the API plus iteration and spliterator behavior.
 */
public class ArrayDequeTest extends TestCase {

    // Distinct sentinel elements shared by the tests; identity-based
    // assertions rely on each being a unique Object instance.
    private Object testObjOne;

    private Object testObjTwo;

    private Object testObjThree;

    private Object testObjFour;

    private Object testObjLast;

    // Fresh, empty deque rebuilt before every test in setUp().
    private ArrayDeque<Object> testQue;

    @Override
    protected void setUp() throws Exception {
        super.setUp();
        testObjOne = new Object();
        testObjTwo = new Object();
        testObjThree = new Object();
        testObjFour = new Object();
        testObjLast = new Object();
        testQue = new ArrayDeque<Object>();
    }

    /**
     * {@link java.util.ArrayDeque#ArrayDeque()}
     * The no-arg constructor produces an empty deque.
     */
    public void test_Constructor() throws Exception {
        assertEquals(0, new ArrayDeque<Object>().size());
    }

    /**
     * {@link java.util.ArrayDeque#ArrayDeque(java.util.Collection)}
     * Copy constructor accepts an empty collection but rejects null.
     */
    public void test_Constructor_LCollection() throws Exception {
        assertEquals(0, new ArrayDeque<Object>(new ArrayList<Object>()).size());
        try {
            new ArrayDeque<Object>(null);
            fail("should throw NPE");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#ArrayDeque(int)}
     * The capacity hint never limits growth; zero and negative hints are
     * accepted and the deque still grows on demand.
     * NOTE(review): tolerance of a negative hint is the behavior asserted
     * here — verify against the platform implementation under test.
     */
    public void test_Constructor_Int() throws Exception {
        assertEquals(0, new ArrayDeque<Object>(8).size());
        ArrayDeque<Object> zeroCapQue = new ArrayDeque<Object>(0);
        assertEquals(0, zeroCapQue.size());
        zeroCapQue.add(testObjOne);
        assertEquals(1, zeroCapQue.size());
        assertEquals(0, new ArrayDeque<Object>(0).size());
        ArrayDeque<Object> negCapQue = new ArrayDeque<Object>(-1);
        assertEquals(0, negCapQue.size());
        negCapQue.add(testObjOne);
        assertEquals(1, negCapQue.size());
        ArrayDeque<Object> oneCapQue = new ArrayDeque<Object>(1);
        assertEquals(0, oneCapQue.size());
        oneCapQue.add(testObjOne);
        assertEquals(1, oneCapQue.size());
        oneCapQue.add(testObjOne);
        assertEquals(2, oneCapQue.size());
    }

    /**
     * {@link java.util.ArrayDeque#addFirst(Object)}
     * Elements are prepended (become the head); null is rejected.
     */
    public void test_addFirst() throws Exception {
        testQue.addFirst(testObjOne);
        assertEquals(1, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        testQue.addFirst(testObjOne);
        assertEquals(2, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        testQue.addFirst(testObjTwo);
        assertEquals(3, testQue.size());
        assertEquals(testObjTwo, testQue.peek());
        assertEquals(testObjOne, testQue.getLast());
        try {
            testQue.addFirst(null);
            fail("should throw NullPointerException");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#addLast(Object)}
     * Elements are appended (become the tail); null is rejected.
     */
    public void test_addLast() throws Exception {
        testQue.addLast(testObjOne);
        assertEquals(1, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        testQue.addLast(testObjOne);
        assertEquals(2, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        testQue.addLast(testObjTwo);
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        assertEquals(testObjTwo, testQue.getLast());
        try {
            testQue.addLast(null);
            fail("should throw NullPointerException");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#offerFirst(Object)}
     * Like addFirst but returns true; null is still rejected.
     */
    public void test_offerFirst() throws Exception {
        assertTrue(testQue.offerFirst(testObjOne));
        assertEquals(1, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        assertTrue(testQue.offerFirst(testObjOne));
        assertEquals(2, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        assertTrue(testQue.offerFirst(testObjTwo));
        assertEquals(3, testQue.size());
        assertEquals(testObjTwo, testQue.peek());
        assertEquals(testObjOne, testQue.getLast());
        try {
            testQue.offerFirst(null);
            fail("should throw NullPointerException");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#offerLast(Object)}
     * Like addLast but returns true; null is still rejected.
     */
    public void test_offerLast() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertEquals(1, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        assertTrue(testQue.offerLast(testObjOne));
        assertEquals(2, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        assertTrue(testQue.offerLast(testObjTwo));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        assertEquals(testObjTwo, testQue.getLast());
        try {
            testQue.offerLast(null);
            fail("should throw NullPointerException");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#removeFirst()}
     * Removes and returns the head; throws once empty.
     */
    public void test_removeFirst() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.removeFirst());
        assertEquals(2, testQue.size());
        assertEquals(testObjTwo, testQue.removeFirst());
        assertEquals(testObjThree, testQue.removeFirst());
        assertEquals(0, testQue.size());
        try {
            testQue.removeFirst();
            fail("should throw NoSuchElementException ");
        } catch (NoSuchElementException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#removeLast()}
     * Removes and returns the tail; throws once empty.
     */
    public void test_removeLast() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjThree, testQue.removeLast());
        assertEquals(2, testQue.size());
        assertEquals(testObjTwo, testQue.removeLast());
        assertEquals(testObjOne, testQue.removeLast());
        assertEquals(0, testQue.size());
        try {
            testQue.removeLast();
            fail("should throw NoSuchElementException ");
        } catch (NoSuchElementException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#pollFirst()}
     * Like removeFirst but returns null once empty instead of throwing.
     */
    public void test_pollFirst() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.pollFirst());
        assertEquals(2, testQue.size());
        assertEquals(testObjTwo, testQue.pollFirst());
        assertEquals(testObjThree, testQue.pollFirst());
        assertEquals(0, testQue.size());
        assertNull(testQue.pollFirst());
    }

    /**
     * {@link java.util.ArrayDeque#pollLast()}
     * Like removeLast but returns null once empty instead of throwing.
     */
    public void test_pollLast() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjThree, testQue.pollLast());
        assertEquals(2, testQue.size());
        assertEquals(testObjTwo, testQue.pollLast());
        assertEquals(testObjOne, testQue.pollLast());
        assertEquals(0, testQue.size());
        assertNull(testQue.pollFirst());
    }

    /**
     * {@link java.util.ArrayDeque#getFirst()}
     * Returns the head without removing it; throws once empty.
     */
    public void test_getFirst() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.getFirst());
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.pollFirst());
        assertEquals(testObjTwo, testQue.getFirst());
        assertEquals(testObjTwo, testQue.pollFirst());
        assertEquals(testObjThree, testQue.pollFirst());
        assertEquals(0, testQue.size());
        try {
            testQue.getFirst();
            fail("should throw NoSuchElementException ");
        } catch (NoSuchElementException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#getLast()}
     * Returns the tail without removing it; throws once empty.
     */
    public void test_getLast() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjThree, testQue.getLast());
        assertEquals(3, testQue.size());
        assertEquals(testObjThree, testQue.pollLast());
        assertEquals(testObjTwo, testQue.getLast());
        assertEquals(testObjTwo, testQue.pollLast());
        assertEquals(testObjOne, testQue.pollLast());
        assertEquals(0, testQue.size());
        try {
            testQue.getLast();
            fail("should throw NoSuchElementException ");
        } catch (NoSuchElementException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#peekFirst()}
     * Non-destructive head access; null once empty.
     */
    public void test_peekFirst() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.peekFirst());
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.pollFirst());
        assertEquals(testObjTwo, testQue.peekFirst());
        assertEquals(testObjTwo, testQue.pollFirst());
        assertEquals(testObjThree, testQue.pollFirst());
        assertEquals(0, testQue.size());
        assertEquals(null, testQue.peekFirst());
    }

    /**
     * {@link java.util.ArrayDeque#peekLast()}
     * Non-destructive tail access; null once empty.
     */
    public void test_peekLast() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjThree, testQue.peekLast());
        assertEquals(3, testQue.size());
        assertEquals(testObjThree, testQue.pollLast());
        assertEquals(testObjTwo, testQue.peekLast());
        assertEquals(testObjTwo, testQue.pollLast());
        assertEquals(testObjOne, testQue.pollLast());
        assertEquals(0, testQue.size());
        assertNull(testQue.peekLast());
    }

    /**
     * {@link java.util.ArrayDeque#removeFirstOccurrence(Object)}
     * Removes only the first (head-most) matching element per call.
     * Deque contents here: [One, Two, One, Three, One].
     */
    public void test_removeFirstOccurrence() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjThree));
        assertTrue(testQue.offerLast(testObjOne));
        assertEquals(5, testQue.size());
        assertTrue(testQue.removeFirstOccurrence(testObjOne));
        assertFalse(testQue.removeFirstOccurrence(testObjFour));
        assertEquals(testObjTwo, testQue.peekFirst());
        assertEquals(testObjOne, testQue.peekLast());
        assertEquals(4, testQue.size());
        assertTrue(testQue.removeFirstOccurrence(testObjOne));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.peekLast());
        assertTrue(testQue.removeFirstOccurrence(testObjOne));
        assertEquals(2, testQue.size());
        assertEquals(testObjThree, testQue.peekLast());
        assertFalse(testQue.removeFirstOccurrence(testObjOne));
    }

    /**
     * {@link java.util.ArrayDeque#removeLastOccurrence(Object)}
     * Removes only the last (tail-most) matching element per call.
     * Deque contents here: [One, Two, One, Three, One].
     */
    public void test_removeLastOccurrence() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjThree));
        assertTrue(testQue.offerLast(testObjOne));
        assertEquals(5, testQue.size());
        assertTrue(testQue.removeLastOccurrence(testObjOne));
        assertFalse(testQue.removeLastOccurrence(testObjFour));
        assertEquals(testObjOne, testQue.peekFirst());
        assertEquals(testObjThree, testQue.peekLast());
        assertEquals(4, testQue.size());
        assertTrue(testQue.removeLastOccurrence(testObjOne));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.peekFirst());
        assertEquals(testObjThree, testQue.peekLast());
        assertTrue(testQue.removeLastOccurrence(testObjOne));
        assertEquals(2, testQue.size());
        assertEquals(testObjThree, testQue.peekLast());
        assertFalse(testQue.removeLastOccurrence(testObjOne));
    }

    /**
     * {@link java.util.ArrayDeque#add(Object)}
     * Queue-style add appends to the tail; null is rejected.
     */
    public void test_add() throws Exception {
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjTwo));
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjThree));
        assertEquals(testObjOne, testQue.peekFirst());
        assertEquals(testObjThree, testQue.peekLast());
        try {
            testQue.add(null);
            fail("Should throw NullPointerException");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#offer(Object)}
     * Queue-style offer appends to the tail; null is rejected.
     */
    public void test_offer() throws Exception {
        assertTrue(testQue.offer(testObjOne));
        assertTrue(testQue.offer(testObjTwo));
        assertTrue(testQue.offer(testObjOne));
        assertTrue(testQue.offer(testObjThree));
        assertEquals(testObjOne, testQue.peekFirst());
        assertEquals(testObjThree, testQue.peekLast());
        try {
            testQue.offer(null);
            fail("Should throw NullPointerException");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#remove()}
     * Queue-style remove takes from the head; throws once empty.
     */
    public void test_remove() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.remove());
        assertEquals(2, testQue.size());
        assertEquals(testObjTwo, testQue.remove());
        assertEquals(testObjThree, testQue.remove());
        assertEquals(0, testQue.size());
        try {
            testQue.remove();
            fail("should throw NoSuchElementException ");
        } catch (NoSuchElementException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#poll()}
     * Queue-style poll takes from the head; null once empty.
     */
    public void test_poll() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.poll());
        assertEquals(2, testQue.size());
        assertEquals(testObjTwo, testQue.poll());
        assertEquals(testObjThree, testQue.poll());
        assertEquals(0, testQue.size());
        assertNull(testQue.poll());
    }

    /**
     * {@link java.util.ArrayDeque#element()}
     * Non-destructive head access; throws once empty.
     */
    public void test_element() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.element());
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.pollFirst());
        assertEquals(testObjTwo, testQue.element());
        assertEquals(testObjTwo, testQue.pollFirst());
        assertEquals(testObjThree, testQue.element());
        assertEquals(testObjThree, testQue.pollFirst());
        assertEquals(0, testQue.size());
        try {
            testQue.element();
            fail("should throw NoSuchElementException ");
        } catch (NoSuchElementException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#peek()}
     * Non-destructive head access; null once empty.
     */
    public void test_peek() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.pollFirst());
        assertEquals(testObjTwo, testQue.peek());
        assertEquals(testObjTwo, testQue.pollFirst());
        assertEquals(testObjThree, testQue.pollFirst());
        assertEquals(0, testQue.size());
        assertEquals(null, testQue.peek());
    }

    /**
     * {@link java.util.ArrayDeque#push(Object)}
     * Stack-style push is equivalent to addFirst; null is rejected.
     */
    public void test_push() throws Exception {
        testQue.push(testObjOne);
        assertEquals(1, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        testQue.push(testObjOne);
        assertEquals(2, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        testQue.push(testObjTwo);
        assertEquals(3, testQue.size());
        assertEquals(testObjTwo, testQue.peek());
        assertEquals(testObjOne, testQue.getLast());
        try {
            testQue.push(null);
            fail("should throw NullPointerException");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#pop()}
     * Stack-style pop is equivalent to removeFirst; throws once empty.
     */
    public void test_pop() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjThree));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.pop());
        assertEquals(2, testQue.size());
        assertEquals(testObjTwo, testQue.pop());
        assertEquals(testObjThree, testQue.pop());
        assertEquals(0, testQue.size());
        try {
            testQue.pop();
            fail("should throw NoSuchElementException ");
        } catch (NoSuchElementException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#size()}
     * size() tracks adds, removes and clear().
     */
    public void test_size() throws Exception {
        assertEquals(0, testQue.size());
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjTwo));
        assertEquals(2, testQue.size());
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjThree));
        assertEquals(4, testQue.size());
        testQue.remove();
        testQue.remove();
        assertEquals(2, testQue.size());
        testQue.clear();
        assertEquals(0, testQue.size());
    }

    /**
     * {@link java.util.ArrayDeque#isEmpty()}
     * isEmpty() is true only when no elements remain.
     */
    public void test_isEmpty() throws Exception {
        assertTrue(testQue.isEmpty());
        assertTrue(testQue.add(testObjOne));
        assertFalse(testQue.isEmpty());
        assertTrue(testQue.add(testObjTwo));
        assertFalse(testQue.isEmpty());
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjThree));
        assertFalse(testQue.isEmpty());
        testQue.remove();
        testQue.remove();
        assertFalse(testQue.isEmpty());
        testQue.clear();
        assertTrue(testQue.isEmpty());
    }

    /**
     * {@link java.util.ArrayDeque#iterator()}
     * Verifies head-to-tail order, fail-fast behavior on concurrent
     * modification, remove() semantics, and iteration over full,
     * partially-filled and wrapped backing arrays.
     */
    public void test_iterator() throws Exception {
        assertFalse(testQue.iterator().hasNext());
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjTwo));
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjThree));
        assertTrue(testQue.add(testObjLast));
        Iterator result = testQue.iterator();
        assertEquals(5, testQue.size());
        try {
            // remove() before any next() is illegal
            result.remove();
            fail("should throw IllegalStateException");
        } catch (IllegalStateException e) {
            // expected
        }
        assertTrue(testQue.add(testObjThree));
        try {
            // structural modification after iterator creation => fail-fast
            result.next();
            fail("should throw ConcurrentModificationException");
        } catch (ConcurrentModificationException e) {
            // expected
        }
        result = testQue.iterator();
        assertEquals(testObjOne, result.next());
        assertEquals(testObjTwo, result.next());
        assertEquals(testObjOne, result.next());
        assertEquals(testObjThree, result.next());
        assertEquals(testObjLast, result.next());
        assertTrue(result.hasNext());
        result.remove();
        assertEquals(testObjThree, result.next());
        assertFalse(result.hasNext());
        try {
            result.next();
            fail("should throw NoSuchElementException");
        } catch (NoSuchElementException e) {
            // expected
        }
        // test a full array
        ArrayDeque<Object> ad = new ArrayDeque<Object>();
        // fill the array
        for (int i = 0; i < 16; ++i) {
            ad.addLast(new Object());
        }
        assertTrue(ad.iterator().hasNext());
        Iterator<Object> iter = ad.iterator();
        for (int i = 0; i < 16; ++i) {
            assertTrue(iter.hasNext());
            iter.next();
        }
        iter.remove();
        // test un-full array
        ad = new ArrayDeque<Object>();
        // fill the array
        for (int i = 0; i < 5; ++i) {
            ad.addLast(new Object());
        }
        iter = ad.iterator();
        for (int i = 0; i < 5; ++i) {
            assertTrue(iter.hasNext());
            iter.next();
        }
        iter.remove();
        ad = new ArrayDeque<Object>();
        // fill the array
        for (int i = 0; i < 16; ++i) {
            ad.addLast(new Object());
        }
        iter = ad.iterator();
        assertTrue(iter.hasNext());
        for (int i = 0; i < ad.size(); ++i) {
            iter.next();
        }
        assertFalse(iter.hasNext());
        iter.remove();
        ad.add(new Object());
        assertFalse(iter.hasNext());
    }

    /**
     * {@link java.util.ArrayDeque#descendingIterator()}
     * Verifies tail-to-head order and remove() semantics; notably the
     * descending iterator observed here is NOT fail-fast (see comment below).
     */
    public void test_descendingIterator() throws Exception {
        assertFalse(testQue.descendingIterator().hasNext());
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjTwo));
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjThree));
        assertTrue(testQue.add(testObjLast));
        Iterator result = testQue.descendingIterator();
        assertEquals(5, testQue.size());
        try {
            result.remove();
            fail("should throw IllegalStateException");
        } catch (IllegalStateException e) {
            // expected
        }
        assertTrue(testQue.add(testObjFour));
        // a strange behavior here, RI's descendingIterator() and iterator() is
        // properly different. Notice spec: "The iterators returned by this
        // class's iterator method are fail-fast". RI shows descendingIterator()
        // is not an iterator method.
        assertEquals(testObjLast, result.next());
        result = testQue.descendingIterator();
        assertEquals(testObjFour, result.next());
        assertEquals(testObjLast, result.next());
        assertEquals(testObjThree, result.next());
        assertEquals(testObjOne, result.next());
        assertEquals(testObjTwo, result.next());
        assertTrue(result.hasNext());
        result.remove();
        assertEquals(testObjOne, result.next());
        assertFalse(result.hasNext());
        try {
            result.next();
            fail("should throw NoSuchElementException");
        } catch (NoSuchElementException e) {
            // expected
        }
        // test a full array
        ArrayDeque<Object> ad = new ArrayDeque<Object>();
        // fill the array
        for (int i = 0; i < 16; ++i) {
            ad.addLast(new Object());
        }
        assertTrue(ad.descendingIterator().hasNext());
        Iterator<Object> iter = ad.descendingIterator();
        for (int i = 0; i < 16; ++i) {
            assertTrue(iter.hasNext());
            iter.next();
        }
        iter.remove();
        // test un-full array
        ad = new ArrayDeque<Object>();
        // fill the array
        for (int i = 0; i < 5; ++i) {
            ad.addLast(new Object());
        }
        iter = ad.descendingIterator();
        for (int i = 0; i < 5; ++i) {
            assertTrue(iter.hasNext());
            iter.next();
        }
        iter.remove();
        ad = new ArrayDeque<Object>();
        // fill the array
        for (int i = 0; i < 16; ++i) {
            ad.addLast(new Object());
        }
        iter = ad.descendingIterator();
        assertTrue(iter.hasNext());
        for (int i = 0; i < ad.size(); ++i) {
            iter.next();
        }
        assertFalse(iter.hasNext());
        iter.remove();
        ad.add(new Object());
        assertFalse(iter.hasNext());
    }

    /**
     * {@link java.util.ArrayDeque#contains(Object)}
     * contains(null) is always false because nulls cannot be stored.
     */
    public void test_contains() throws Exception {
        assertFalse(testQue.contains(testObjFour));
        assertFalse(testQue.contains(null));
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjTwo));
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjThree));
        assertTrue(testQue.add(testObjLast));
        assertTrue(testQue.contains(testObjOne));
        assertTrue(testQue.contains(testObjTwo));
        assertTrue(testQue.contains(testObjThree));
        assertTrue(testQue.contains(testObjLast));
        assertFalse(testQue.contains(null));
        testQue.clear();
        assertFalse(testQue.contains(testObjOne));
        assertFalse(testQue.contains(testObjTwo));
    }

    /**
     * {@link java.util.ArrayDeque#remove(Object)}
     * remove(Object) behaves like removeFirstOccurrence.
     */
    public void test_remove_LObject() throws Exception {
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjTwo));
        assertTrue(testQue.offerLast(testObjOne));
        assertTrue(testQue.offerLast(testObjThree));
        assertTrue(testQue.offerLast(testObjOne));
        assertEquals(5, testQue.size());
        assertTrue(testQue.remove(testObjOne));
        assertFalse(testQue.remove(testObjFour));
        assertEquals(testObjTwo, testQue.peekFirst());
        assertEquals(testObjOne, testQue.peekLast());
        assertEquals(4, testQue.size());
        assertTrue(testQue.remove(testObjOne));
        assertEquals(3, testQue.size());
        assertEquals(testObjOne, testQue.peekLast());
        assertTrue(testQue.remove(testObjOne));
        assertEquals(2, testQue.size());
        assertEquals(testObjThree, testQue.peekLast());
        assertFalse(testQue.remove(testObjOne));
    }

    /**
     * {@link java.util.ArrayDeque#clear()}
     * clear() empties the deque and is safe on an already-empty one.
     */
    public void test_clear() throws Exception {
        assertTrue(testQue.isEmpty());
        testQue.clear();
        assertTrue(testQue.isEmpty());
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjTwo));
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjThree));
        testQue.clear();
        assertTrue(testQue.isEmpty());
    }

    /**
     * {@link java.util.ArrayDeque#toArray()}
     * The returned array is in head-to-tail order and is a copy: mutating
     * it must not affect the deque.
     */
    public void test_toArray() throws Exception {
        assertEquals(0, testQue.toArray().length);
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjTwo));
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjThree));
        assertTrue(testQue.add(testObjLast));
        Object[] result = testQue.toArray();
        assertEquals(5, testQue.size());
        assertEquals(testObjOne, result[0]);
        assertEquals(testObjTwo, result[1]);
        assertEquals(testObjOne, result[2]);
        assertEquals(testObjThree, result[3]);
        assertEquals(testObjLast, result[4]);
        // change in array do not affect ArrayDeque
        result[0] = null;
        assertEquals(5, testQue.size());
        assertEquals(testObjOne, testQue.peek());
    }

    /**
     * {@link java.util.ArrayDeque#toArray(Object[])}
     * A too-small target array forces allocation of a new one; the passed
     * array is returned when it fits; null target throws NPE.
     */
    public void test_toArray_$LObject() throws Exception {
        Object[] array = new Object[0];
        Object[] result = testQue.toArray(array);
        assertEquals(0, result.length);
        assertEquals(array, result);
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjTwo));
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjThree));
        assertTrue(testQue.add(testObjLast));
        result = testQue.toArray(array);
        assertEquals(5, testQue.size());
        assertEquals(5, result.length);
        assertEquals(0, array.length);
        assertFalse(array == result);
        assertEquals(testObjOne, result[0]);
        assertEquals(testObjTwo, result[1]);
        assertEquals(testObjOne, result[2]);
        assertEquals(testObjThree, result[3]);
        assertEquals(testObjLast, result[4]);
        // change in array do not affect ArrayDeque
        result[0] = null;
        assertEquals(5, testQue.size());
        assertEquals(testObjOne, testQue.peek());
        try {
            testQue.toArray(null);
            fail("should throw NullPointerException");
        } catch (NullPointerException e) {
            // expected
        }
    }

    /**
     * {@link java.util.ArrayDeque#clone()}
     * clone() is a shallow copy: same elements (including a self-reference)
     * in the same order, in an independent deque.
     */
    public void test_clone() throws Exception {
        ArrayDeque<Object> cloned = testQue.clone();
        assertEquals(0, cloned.size());
        assertFalse(cloned == testQue);
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjTwo));
        assertTrue(testQue.add(testObjOne));
        assertTrue(testQue.add(testObjThree));
        assertTrue(testQue.add(testObjLast));
        // a deque may contain a reference to itself
        assertTrue(testQue.add(testQue));
        cloned = testQue.clone();
        assertEquals(6, cloned.size());
        while (0 != testQue.size()) {
            assertEquals(testQue.remove(), cloned.remove());
        }
    }

    // Iterator.forEachRemaining delivers remaining elements and rejects a null action.
    public void test_forEachRemaining_iterator() throws Exception {
        ForEachRemainingTester.test_forEachRemaining(new ArrayDeque<>(), new String[]{ "foo", "bar", "baz "});
        ForEachRemainingTester.test_forEachRemaining_NPE(new ArrayDeque<>(), new String[]{"foo", "bar", "baz "});
    }

    // forEachRemaining does not reliably detect concurrent modification.
    public void test_forEachRemaining_CME() throws Exception {
        ArrayDeque<String> adq = new ArrayDeque<>();
        adq.add("foo");
        // The ArrayDeque forEachRemaining implementation doesn't use a precise check
        // for concurrent modifications.
        adq.iterator().forEachRemaining(s -> adq.add(s));
    }

    // Spliterator contract: ordered, sized, subsized, splittable, null-hostile.
    public void test_spliterator() throws Exception {
        ArrayList<Integer> testElements = new ArrayList<>(
                Arrays.asList(1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 15, 16));
        ArrayDeque<Integer> adq = new ArrayDeque<>();
        adq.addAll(testElements);
        SpliteratorTester.runBasicIterationTests(adq.spliterator(), testElements);
        SpliteratorTester.runBasicSplitTests(adq, testElements);
        SpliteratorTester.testSpliteratorNPE(adq.spliterator());
        assertTrue(adq.spliterator().hasCharacteristics(
                Spliterator.ORDERED | Spliterator.SIZED | Spliterator.SUBSIZED));
        SpliteratorTester.runOrderedTests(adq);
        SpliteratorTester.runSizedTests(adq, 16 /* expected size */);
        SpliteratorTester.runSubSizedTests(adq, 16 /* expected size */);
        SpliteratorTester.assertSupportsTrySplit(adq);
    }

    // ArrayDeque spliterators are not fail-fast (method continues beyond this chunk).
    public void test_spliterator_CME() throws Exception {
        ArrayDeque<Integer> adq = new ArrayDeque<>();
        adq.add(52);
        Spliterator<Integer> sp = adq.spliterator();
        // Spliterators from ArrayDequeues never throw CME. The following statements
        // would have thrown a CME on most other collection classes.
assertTrue(sp.tryAdvance(value -> adq.add(value))); sp.forEachRemaining(value -> adq.add(value)); } /** * java.util.ArrayDeque#Serialization() */ public void test_serialization() throws Exception { assertTrue(testQue.add(new Integer(1))); assertTrue(testQue.add(new Integer(2))); assertTrue(testQue.add(new Integer(3))); assertTrue(testQue.add(new Integer(4))); assertTrue(testQue.add(new Integer(5))); SerializationTest.verifySelf(testQue, new SerializableAssert() { public void assertDeserialized(Serializable initial, Serializable deserialized) { ArrayDeque<Object> formerQue = (ArrayDeque) initial; ArrayDeque<Object> deserializedQue = (ArrayDeque) deserialized; assertEquals(formerQue.remove(), deserializedQue.remove()); } }); } /** * serialization/deserialization compatibility with RI. */ @SuppressWarnings({ "unchecked", "boxing" }) public void testSerializationCompatibility() throws Exception { assertTrue(testQue.add(new Integer(1))); assertTrue(testQue.add(new Integer(2))); assertTrue(testQue.add(new Integer(3))); assertTrue(testQue.add(new Integer(4))); assertTrue(testQue.add(new Integer(5))); SerializationTest.verifyGolden(this, testQue, new SerializableAssert() { public void assertDeserialized(Serializable initial, Serializable deserialized) { ArrayDeque<Object> formerQue = (ArrayDeque) initial; ArrayDeque<Object> deserializedQue = (ArrayDeque) deserialized; assertEquals(formerQue.remove(), deserializedQue.remove()); } }); } }
googleapis/google-cloud-java
35,956
java-securitycenter-settings/proto-google-cloud-securitycenter-settings-v1beta1/src/main/java/com/google/cloud/securitycenter/settings/v1beta1/BatchGetSettingsRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/securitycenter/settings/v1beta1/securitycenter_settings_service.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.securitycenter.settings.v1beta1; /** * * * <pre> * Request message for BatchGetSettings. * </pre> * * Protobuf type {@code google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest} */ public final class BatchGetSettingsRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest) BatchGetSettingsRequestOrBuilder { private static final long serialVersionUID = 0L; // Use BatchGetSettingsRequest.newBuilder() to construct. 
private BatchGetSettingsRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private BatchGetSettingsRequest() { parent_ = ""; names_ = com.google.protobuf.LazyStringArrayList.emptyList(); } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new BatchGetSettingsRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.settings.v1beta1.SettingsServiceProto .internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.settings.v1beta1.SettingsServiceProto .internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest.class, com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest.Builder.class); } public static final int PARENT_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * Required. The relative resource name of the organization shared by all of the * settings being retrieved. * Format: * * `organizations/{organization}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * Required. 
The relative resource name of the organization shared by all of the * settings being retrieved. * Format: * * `organizations/{organization}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int NAMES_FIELD_NUMBER = 2; @SuppressWarnings("serial") private com.google.protobuf.LazyStringArrayList names_ = com.google.protobuf.LazyStringArrayList.emptyList(); /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. * Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @return A list containing the names. */ public com.google.protobuf.ProtocolStringList getNamesList() { return names_; } /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. * Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @return The count of names. 
*/ public int getNamesCount() { return names_.size(); } /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. * Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @param index The index of the element to return. * @return The names at the given index. */ public java.lang.String getNames(int index) { return names_.get(index); } /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. * Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @param index The index of the value to return. * @return The bytes of the names at the given index. 
*/ public com.google.protobuf.ByteString getNamesBytes(int index) { return names_.getByteString(index); } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, parent_); } for (int i = 0; i < names_.size(); i++) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, names_.getRaw(i)); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, parent_); } { int dataSize = 0; for (int i = 0; i < names_.size(); i++) { dataSize += computeStringSizeNoTag(names_.getRaw(i)); } size += dataSize; size += 1 * getNamesList().size(); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest)) { return super.equals(obj); } com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest other = (com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest) obj; if (!getParent().equals(other.getParent())) return false; if (!getNamesList().equals(other.getNamesList())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return 
memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); if (getNamesCount() > 0) { hash = (37 * hash) + NAMES_FIELD_NUMBER; hash = (53 * hash) + getNamesList().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest parseFrom( byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static 
com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest 
prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for BatchGetSettings. * </pre> * * Protobuf type {@code google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest) com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.securitycenter.settings.v1beta1.SettingsServiceProto .internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.securitycenter.settings.v1beta1.SettingsServiceProto .internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest.class, com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest.Builder .class); } // Construct using // com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; parent_ = ""; names_ = com.google.protobuf.LazyStringArrayList.emptyList(); 
return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.securitycenter.settings.v1beta1.SettingsServiceProto .internal_static_google_cloud_securitycenter_settings_v1beta1_BatchGetSettingsRequest_descriptor; } @java.lang.Override public com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest getDefaultInstanceForType() { return com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest build() { com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest buildPartial() { com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest result = new com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.parent_ = parent_; } if (((from_bitField0_ & 0x00000002) != 0)) { names_.makeImmutable(); result.names_ = names_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return 
super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest) { return mergeFrom( (com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest other) { if (other == com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest .getDefaultInstance()) return this; if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000001; onChanged(); } if (!other.names_.isEmpty()) { if (names_.isEmpty()) { names_ = other.names_; bitField0_ |= 0x00000002; } else { ensureNamesIsMutable(); names_.addAll(other.names_); } onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { java.lang.String s = input.readStringRequireUtf8(); ensureNamesIsMutable(); names_.add(s); break; } // case 18 
default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object parent_ = ""; /** * * * <pre> * Required. The relative resource name of the organization shared by all of the * settings being retrieved. * Format: * * `organizations/{organization}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. The relative resource name of the organization shared by all of the * settings being retrieved. * Format: * * `organizations/{organization}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. The relative resource name of the organization shared by all of the * settings being retrieved. * Format: * * `organizations/{organization}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The parent to set. 
* @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The relative resource name of the organization shared by all of the * settings being retrieved. * Format: * * `organizations/{organization}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Required. The relative resource name of the organization shared by all of the * settings being retrieved. * Format: * * `organizations/{organization}` * </pre> * * <code> * string parent = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private com.google.protobuf.LazyStringArrayList names_ = com.google.protobuf.LazyStringArrayList.emptyList(); private void ensureNamesIsMutable() { if (!names_.isModifiable()) { names_ = new com.google.protobuf.LazyStringArrayList(names_); } bitField0_ |= 0x00000002; } /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. 
* Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @return A list containing the names. */ public com.google.protobuf.ProtocolStringList getNamesList() { names_.makeImmutable(); return names_; } /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. * Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @return The count of names. */ public int getNamesCount() { return names_.size(); } /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. * Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @param index The index of the element to return. * @return The names at the given index. */ public java.lang.String getNames(int index) { return names_.get(index); } /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. 
* Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @param index The index of the value to return. * @return The bytes of the names at the given index. */ public com.google.protobuf.ByteString getNamesBytes(int index) { return names_.getByteString(index); } /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. * Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @param index The index to set the value at. * @param value The names to set. * @return This builder for chaining. */ public Builder setNames(int index, java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureNamesIsMutable(); names_.set(index, value); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. * Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @param value The names to add. 
* @return This builder for chaining. */ public Builder addNames(java.lang.String value) { if (value == null) { throw new NullPointerException(); } ensureNamesIsMutable(); names_.add(value); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. * Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @param values The names to add. * @return This builder for chaining. */ public Builder addAllNames(java.lang.Iterable<java.lang.String> values) { ensureNamesIsMutable(); com.google.protobuf.AbstractMessageLite.Builder.addAll(values, names_); bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. * Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @return This builder for chaining. */ public Builder clearNames() { names_ = com.google.protobuf.LazyStringArrayList.emptyList(); bitField0_ = (bitField0_ & ~0x00000002); ; onChanged(); return this; } /** * * * <pre> * The names of the settings to retrieve. * A maximum of 1000 settings can be retrieved in a batch. 
* Formats: * * `organizations/{organization}/settings` * * `folders/{folder}/settings` * * `projects/{project}/settings` * * `projects/{project}/locations/{location}/clusters/{cluster}/settings` * * `projects/{project}/regions/{region}/clusters/{cluster}/settings` * * `projects/{project}/zones/{zone}/clusters/{cluster}/settings` * </pre> * * <code>repeated string names = 2;</code> * * @param value The bytes of the names to add. * @return This builder for chaining. */ public Builder addNamesBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); ensureNamesIsMutable(); names_.add(value); bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest) } // @@protoc_insertion_point(class_scope:google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest) private static final com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest(); } public static com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<BatchGetSettingsRequest> PARSER = new com.google.protobuf.AbstractParser<BatchGetSettingsRequest>() { @java.lang.Override public BatchGetSettingsRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws 
com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<BatchGetSettingsRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<BatchGetSettingsRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.securitycenter.settings.v1beta1.BatchGetSettingsRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/jackrabbit-oak
35,670
oak-run-commons/src/test/java/org/apache/jackrabbit/oak/index/indexer/document/flatfile/pipelined/PipelinedTreeStoreIT.java
/* * Licensed to the Apache Software Foundation (ASF) under one * or more contributor license agreements. See the NOTICE file * distributed with this work for additional information * regarding copyright ownership. The ASF licenses this file * to you under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance * with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, * software distributed under the License is distributed on an * "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY * KIND, either express or implied. See the License for the * specific language governing permissions and limitations * under the License. */ package org.apache.jackrabbit.oak.index.indexer.document.flatfile.pipelined; import static java.lang.management.ManagementFactory.getPlatformMBeanServer; import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.pipelined.PipelineITUtil.assertMetrics; import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.pipelined.PipelineITUtil.contentDamPathFilter; import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.pipelined.PipelinedMongoDownloadTask.OAK_INDEXER_PIPELINED_MONGO_CUSTOM_EXCLUDED_PATHS; import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.pipelined.PipelinedMongoDownloadTask.OAK_INDEXER_PIPELINED_MONGO_CUSTOM_EXCLUDE_ENTRIES_REGEX; import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.pipelined.PipelinedMongoDownloadTask.OAK_INDEXER_PIPELINED_MONGO_PARALLEL_DUMP; import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.pipelined.PipelinedMongoDownloadTask.OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING; import static org.apache.jackrabbit.oak.index.indexer.document.flatfile.pipelined.PipelinedMongoDownloadTask.OAK_INDEXER_PIPELINED_RETRY_ON_CONNECTION_ERRORS; import static 
org.junit.Assert.assertArrayEquals; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertThrows; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.IOException; import java.util.ArrayList; import java.util.Collections; import java.util.Comparator; import java.util.List; import java.util.Map; import java.util.Set; import java.util.concurrent.Executors; import java.util.concurrent.ScheduledExecutorService; import java.util.function.Consumer; import java.util.function.Predicate; import java.util.stream.Collectors; import org.apache.commons.io.FileUtils; import org.apache.commons.lang3.RandomStringUtils; import org.apache.jackrabbit.oak.api.CommitFailedException; import org.apache.jackrabbit.oak.commons.Compression; import org.apache.jackrabbit.oak.commons.PathUtils; import org.apache.jackrabbit.oak.index.indexer.document.flatfile.NodeStateEntryReader; import org.apache.jackrabbit.oak.index.indexer.document.tree.TreeStore; import org.apache.jackrabbit.oak.index.indexer.document.tree.store.TreeSession; import org.apache.jackrabbit.oak.plugins.document.DocumentMKBuilderProvider; import org.apache.jackrabbit.oak.plugins.document.DocumentNodeStore; import org.apache.jackrabbit.oak.plugins.document.MongoConnectionFactory; import org.apache.jackrabbit.oak.plugins.document.MongoUtils; import org.apache.jackrabbit.oak.plugins.document.NodeDocument; import org.apache.jackrabbit.oak.plugins.document.RevisionVector; import org.apache.jackrabbit.oak.plugins.document.util.MongoConnection; import org.apache.jackrabbit.oak.plugins.document.util.Utils; import org.apache.jackrabbit.oak.plugins.index.ConsoleIndexingReporter; import org.apache.jackrabbit.oak.plugins.metric.MetricStatisticsProvider; import org.apache.jackrabbit.oak.spi.blob.MemoryBlobStore; import org.apache.jackrabbit.oak.spi.commit.CommitInfo; import org.apache.jackrabbit.oak.spi.commit.EmptyHook; import org.apache.jackrabbit.oak.spi.filter.PathFilter; import 
org.apache.jackrabbit.oak.spi.state.NodeBuilder; import org.apache.jackrabbit.oak.stats.Clock; import org.jetbrains.annotations.NotNull; import org.junit.After; import org.junit.AfterClass; import org.junit.Assume; import org.junit.Before; import org.junit.BeforeClass; import org.junit.Ignore; import org.junit.Rule; import org.junit.Test; import org.junit.contrib.java.lang.system.RestoreSystemProperties; import org.junit.rules.TemporaryFolder; public class PipelinedTreeStoreIT { private static ScheduledExecutorService executorService; @Rule public final MongoConnectionFactory connectionFactory = new MongoConnectionFactory(); @Rule public final DocumentMKBuilderProvider builderProvider = new DocumentMKBuilderProvider(); @Rule public final RestoreSystemProperties restoreSystemProperties = new RestoreSystemProperties(); @Rule public final TemporaryFolder sortFolder = new TemporaryFolder(); private MetricStatisticsProvider statsProvider; private ConsoleIndexingReporter indexingReporter; @BeforeClass public static void setup() throws IOException { Assume.assumeTrue(MongoUtils.isAvailable()); executorService = Executors.newSingleThreadScheduledExecutor(); } @AfterClass public static void teardown() { if (executorService != null) { executorService.shutdown(); } } @Before public void before() { MongoConnection c = connectionFactory.getConnection(); if (c != null) { c.getDatabase().drop(); } statsProvider = new MetricStatisticsProvider(getPlatformMBeanServer(), executorService); indexingReporter = new ConsoleIndexingReporter(); } @After public void tear() { MongoConnection c = connectionFactory.getConnection(); if (c != null) { c.getDatabase().drop(); } statsProvider.close(); statsProvider = null; indexingReporter = null; } @Test public void createFFS_mongoFiltering_include_excludes() throws Exception { System.setProperty(OAK_INDEXER_PIPELINED_RETRY_ON_CONNECTION_ERRORS, "false"); System.setProperty(OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true"); 
Predicate<String> pathPredicate = s -> true; List<PathFilter> pathFilters = List.of(new PathFilter(List.of("/content/dam/2023"), List.of("/content/dam/2023/02"))); testSuccessfulDownload(pathPredicate, pathFilters, List.of( "/|{}", "/content|{}", "/content/dam|{}", "/content/dam/2023|{\"p2\":\"v2023\"}", "/content/dam/2023/01|{\"p1\":\"v202301\"}", "/content/dam/2023/02|{}" ), true); } @Test public void createFFS_mongoFiltering_include_excludes2() throws Exception { System.setProperty(OAK_INDEXER_PIPELINED_RETRY_ON_CONNECTION_ERRORS, "false"); System.setProperty(OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true"); Predicate<String> pathPredicate = s -> true; // NOTE: If a path /a/b is in the excluded paths, the descendants of /a/b will not be downloaded but /a/b will // be downloaded. This is an intentional limitation of the logic to compute the Mongo filter which was done // to avoid the extra complexity of also filtering the root of the excluded tree. The transform stage would anyway // filter out these additional documents. 
List<PathFilter> pathFilters = List.of(new PathFilter(List.of("/content/dam/1000", "/content/dam/2022"), List.of("/content/dam/2022/02", "/content/dam/2022/04"))); testSuccessfulDownload(pathPredicate, pathFilters, List.of( "/|{}", "/content|{}", "/content/dam|{}", "/content/dam/1000|{}", "/content/dam/1000/12|{\"p1\":\"v100012\"}", "/content/dam/2022|{}", "/content/dam/2022/01|{\"p1\":\"v202201\"}", "/content/dam/2022/01/01|{\"p1\":\"v20220101\"}", "/content/dam/2022/02|{\"p1\":\"v202202\"}", "/content/dam/2022/03|{\"p1\":\"v202203\"}", "/content/dam/2022/04|{\"p1\":\"v202204\"}" ), true); } @Test public void createFFS_mongoFiltering_include_excludes3() throws Exception { System.setProperty(OAK_INDEXER_PIPELINED_RETRY_ON_CONNECTION_ERRORS, "false"); System.setProperty(OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true"); Predicate<String> pathPredicate = s -> true; List<PathFilter> pathFilters = List.of(new PathFilter(List.of("/"), List.of("/content/dam", "/etc", "/home", "/jcr:system"))); testSuccessfulDownload(pathPredicate, pathFilters, List.of( "/|{}", "/content|{}", "/content/dam|{}", "/etc|{}", "/home|{}", "/jcr:system|{}" ), true); } @Test public void createFFS_mongoFiltering_include_excludes_retryOnConnectionErrors() throws Exception { System.setProperty(OAK_INDEXER_PIPELINED_RETRY_ON_CONNECTION_ERRORS, "true"); System.setProperty(OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true"); Predicate<String> pathPredicate = s -> true; List<PathFilter> pathFilters = List.of(new PathFilter(List.of("/"), List.of("/content/dam", "/etc", "/home", "/jcr:system"))); testSuccessfulDownload(pathPredicate, pathFilters, List.of( "/|{}", "/content|{}", "/content/dam|{}", "/etc|{}", "/home|{}", "/jcr:system|{}" ), true); } @Test public void createFFS_mongoFiltering_include_excludes4() throws Exception { System.setProperty(OAK_INDEXER_PIPELINED_RETRY_ON_CONNECTION_ERRORS, "false"); System.setProperty(OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true"); 
Predicate<String> pathPredicate = s -> true; List<PathFilter> pathFilters = List.of( new PathFilter(List.of("/content/dam/1000"), List.of()), new PathFilter(List.of("/content/dam/2022"), List.of("/content/dam/2022/01")) ); testSuccessfulDownload(pathPredicate, pathFilters, List.of( "/|{}", "/content|{}", "/content/dam|{}", "/content/dam/1000|{}", "/content/dam/1000/12|{\"p1\":\"v100012\"}", "/content/dam/2022|{}", "/content/dam/2022/01|{\"p1\":\"v202201\"}", "/content/dam/2022/02|{\"p1\":\"v202202\"}", "/content/dam/2022/02/01|{\"p1\":\"v20220201\"}", "/content/dam/2022/02/02|{\"p1\":\"v20220202\"}", "/content/dam/2022/02/03|{\"p1\":\"v20220203\"}", "/content/dam/2022/02/04|{\"p1\":\"v20220204\"}", "/content/dam/2022/03|{\"p1\":\"v202203\"}", "/content/dam/2022/04|{\"p1\":\"v202204\"}" ), true); } @Test public void createFFS_mongoFiltering_multipleIndexes() throws Exception { System.setProperty(OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true"); Predicate<String> pathPredicate = s -> true; PathFilter pathFilter = new PathFilter(List.of("/content/dam/1000", "/content/dam/2023", "/content/dam/2023/01"), List.of()); List<PathFilter> pathFilters = List.of(pathFilter); testSuccessfulDownload(pathPredicate, pathFilters, List.of( "/|{}", "/content|{}", "/content/dam|{}", "/content/dam/1000|{}", "/content/dam/1000/12|{\"p1\":\"v100012\"}", "/content/dam/2023|{\"p2\":\"v2023\"}", "/content/dam/2023/01|{\"p1\":\"v202301\"}", "/content/dam/2023/02|{}", "/content/dam/2023/02/28|{\"p1\":\"v20230228\"}" ), true); } @Test public void createFFS_filter_long_paths() throws Exception { System.setProperty(OAK_INDEXER_PIPELINED_RETRY_ON_CONNECTION_ERRORS, "false"); System.setProperty(OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true"); // Create a filter on the node with the longest path String longestLine = PipelineITUtil.EXPECTED_FFS.stream().max(Comparator.comparingInt(String::length)).get(); String longestPath = longestLine.substring(0, longestLine.lastIndexOf("|")); 
String parent = PathUtils.getParentPath(longestPath); Predicate<String> pathPredicate = s -> true; List<PathFilter> pathFilters = List.of(new PathFilter(List.of(parent), List.of())); // The results should contain all the parents of the node with the longest path ArrayList<String> expected = new ArrayList<>(); expected.add(longestPath + "|{}"); while (true) { expected.add(parent + "|{}"); if (parent.equals("/")) { break; } parent = PathUtils.getParentPath(parent); } // The list above has the longest paths first, reverse it to match the order in the FFS Collections.reverse(expected); testSuccessfulDownload(pathPredicate, pathFilters, expected, false); } @Test public void createFFSCustomExcludePathsRegexRetryOnConnectionErrors() throws Exception { Predicate<String> pathPredicate = s -> contentDamPathFilter.filter(s) != PathFilter.Result.EXCLUDE; testPipelinedStrategy(Map.of( // Filter all nodes ending in /metadata.xml or having a path section with ".*.jpg" OAK_INDEXER_PIPELINED_MONGO_CUSTOM_EXCLUDE_ENTRIES_REGEX, "/metadata.xml$|/.*.jpg/.*", OAK_INDEXER_PIPELINED_RETRY_ON_CONNECTION_ERRORS, "true", OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "false" ), this::buildNodeStoreForExcludedRegexTest, pathPredicate, null, excludedPathsRegexTestExpected); } @Test public void createFFSCustomExcludePathsRegexNoRetryOnConnectionError() throws Exception { Predicate<String> pathPredicate = s -> contentDamPathFilter.filter(s) != PathFilter.Result.EXCLUDE; testPipelinedStrategy(Map.of( // Filter all nodes ending in /metadata.xml or having a path section with ".*.jpg" OAK_INDEXER_PIPELINED_MONGO_CUSTOM_EXCLUDE_ENTRIES_REGEX, "/metadata.xml$|/.*.jpg/.*", OAK_INDEXER_PIPELINED_RETRY_ON_CONNECTION_ERRORS, "false", OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "false" ), this::buildNodeStoreForExcludedRegexTest, pathPredicate, null, excludedPathsRegexTestExpected); } @Test public void createFFSCustomExcludePathsRegexRetryOnConnectionErrorsRegexFiltering() throws Exception { 
Predicate<String> pathPredicate = s -> contentDamPathFilter.filter(s) != PathFilter.Result.EXCLUDE; testPipelinedStrategy(Map.of( // Filter all nodes ending in /metadata.xml or having a path section with ".*.jpg" OAK_INDEXER_PIPELINED_MONGO_CUSTOM_EXCLUDE_ENTRIES_REGEX, "/metadata.xml$|/.*.jpg/.*", OAK_INDEXER_PIPELINED_RETRY_ON_CONNECTION_ERRORS, "true", OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true" ), this::buildNodeStoreForExcludedRegexTest, pathPredicate, List.of(contentDamPathFilter), excludedPathsRegexTestExpected); } @Test public void createFFSCustomExcludePathsRegexNoRetryOnConnectionErrorRegexFiltering() throws Exception { Predicate<String> pathPredicate = s -> contentDamPathFilter.filter(s) != PathFilter.Result.EXCLUDE; testPipelinedStrategy(Map.of( // Filter all nodes ending in /metadata.xml or having a path section with ".*.jpg" OAK_INDEXER_PIPELINED_MONGO_CUSTOM_EXCLUDE_ENTRIES_REGEX, "/metadata.xml$|/.*.jpg/.*", OAK_INDEXER_PIPELINED_RETRY_ON_CONNECTION_ERRORS, "false", OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true" ), this::buildNodeStoreForExcludedRegexTest, pathPredicate, List.of(contentDamPathFilter), excludedPathsRegexTestExpected); } @Test public void getOnlyModified() throws Exception { long minModified = 0; try (MongoTestBackend rwStore = createNodeStore(false)) { DocumentNodeStore rwNodeStore = rwStore.documentNodeStore; @NotNull NodeBuilder rootBuilder = rwNodeStore.getRoot().builder(); @NotNull NodeBuilder contentDamBuilder = rootBuilder.child("content").child("dam"); contentDamBuilder.child("old.png"); contentDamBuilder.child("change.png").setProperty("test", 0); rwNodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY); Clock clock = rwNodeStore.getClock(); long oldMod = NodeDocument.getModifiedInSecs(clock.getTime()); // wait until the modified time changes do { clock.waitUntil(clock.getTime() + 1000); minModified = NodeDocument.getModifiedInSecs(clock.getTime()); } while (oldMod == minModified); 
rootBuilder = rwNodeStore.getRoot().builder(); contentDamBuilder = rootBuilder.child("content").child("dam"); contentDamBuilder.child("change.png").setProperty("test", 1); contentDamBuilder.child("new.png"); rwNodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY); } // download only the recent changes try (MongoTestBackend roStore = createNodeStore(true)) { Predicate<String> pathPredicate = s -> contentDamPathFilter.filter(s) != PathFilter.Result.EXCLUDE; List<PathFilter> mongoRegexPathFilter = List.of(contentDamPathFilter); PipelinedTreeStoreStrategy pipelinedStrategy = createStrategy( roStore, pathPredicate, mongoRegexPathFilter, minModified); File file = pipelinedStrategy.createSortedStoreFile(); assertTrue(file.exists()); List<String> result = readAllEntries(file); List<String> expected = List.of( "/|{}", "/content|{}", "/content/dam|{}", "/content/dam/change.png|{\"test\":1}", "/content/dam/new.png|{}" ); assertEquals(expected.toString(), result.toString()); assertMetrics(statsProvider); } // download everything try (MongoTestBackend roStore = createNodeStore(true)) { Predicate<String> pathPredicate = s -> contentDamPathFilter.filter(s) != PathFilter.Result.EXCLUDE; List<PathFilter> mongoRegexPathFilter = List.of(contentDamPathFilter); PipelinedTreeStoreStrategy pipelinedStrategy = createStrategy( roStore, pathPredicate, mongoRegexPathFilter, 0); File file = pipelinedStrategy.createSortedStoreFile(); assertTrue(file.exists()); List<String> result = readAllEntries(file); List<String> expected = List.of( "/|{}", "/content|{}", "/content/dam|{}", "/content/dam/change.png|{\"test\":1}", "/content/dam/new.png|{}", "/content/dam/old.png|{}" ); assertEquals(expected, result); assertMetrics(statsProvider); } } private void buildNodeStoreForExcludedRegexTest(DocumentNodeStore rwNodeStore) { @NotNull NodeBuilder rootBuilder = rwNodeStore.getRoot().builder(); @NotNull NodeBuilder contentDamBuilder = rootBuilder.child("content").child("dam"); 
contentDamBuilder.child("a.jpg").child("jcr:content").child("metadata.xml"); contentDamBuilder.child("a.jpg").child("jcr:content").child("metadata.text"); contentDamBuilder.child("image_a.png").child("jcr:content").child("metadata.text"); contentDamBuilder.child("image_a.png").child("jcr:content").child("metadata.xml"); try { rwNodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY); } catch (CommitFailedException e) { throw new RuntimeException(e); } } private final List<String> excludedPathsRegexTestExpected = List.of( "/|{}", "/content|{}", "/content/dam|{}", "/content/dam/a.jpg|{}", "/content/dam/image_a.png|{}", "/content/dam/image_a.png/jcr:content|{}", "/content/dam/image_a.png/jcr:content/metadata.text|{}" ); private void testPipelinedStrategy(Map<String, String> settings, Consumer<DocumentNodeStore> contentBuilder, Predicate<String> pathPredicate, List<PathFilter> pathFilters, List<String> expected) throws IOException { settings.forEach(System::setProperty); try (MongoTestBackend rwStore = createNodeStore(false)) { DocumentNodeStore rwNodeStore = rwStore.documentNodeStore; contentBuilder.accept(rwNodeStore); try (MongoTestBackend roStore = createNodeStore(true)) { PipelinedTreeStoreStrategy pipelinedStrategy = createStrategy(roStore, pathPredicate, pathFilters); File file = pipelinedStrategy.createSortedStoreFile(); assertTrue(file.exists()); assertEquals(expected, readAllEntries(file)); assertMetrics(statsProvider); } } } private void testSuccessfulDownload(Predicate<String> pathPredicate, List<PathFilter> pathFilters) throws CommitFailedException, IOException { testSuccessfulDownload(pathPredicate, pathFilters, PipelineITUtil.EXPECTED_FFS, false); } private void testSuccessfulDownload(Predicate<String> pathPredicate, List<PathFilter> mongoRegexPathFilter, List<String> expected, boolean ignoreLongPaths) throws CommitFailedException, IOException { try (MongoTestBackend rwStore = createNodeStore(false)) { 
PipelineITUtil.createContent(rwStore.documentNodeStore); } try (MongoTestBackend roStore = createNodeStore(true)) { PipelinedTreeStoreStrategy pipelinedStrategy = createStrategy(roStore, pathPredicate, mongoRegexPathFilter); File file = pipelinedStrategy.createSortedStoreFile(); assertTrue(file.exists()); List<String> result = readAllEntries(file); if (ignoreLongPaths) { // Remove the long paths from the result. The filter on Mongo is best-effort, it will download long path // documents, even if they do not match the includedPaths. result = result.stream() .filter(s -> { String name = s.split("\\|")[0]; return name.length() < Utils.PATH_LONG; }) .collect(Collectors.toList()); } assertEquals(expected, result); assertMetrics(statsProvider); } } @Test public void createFFS_pathPredicateDoesNotMatch() throws Exception { try (MongoTestBackend rwStore = createNodeStore(false)) { PipelineITUtil.createContent(rwStore.documentNodeStore); } try (MongoTestBackend roStore = createNodeStore(true)) { Predicate<String> pathPredicate = s -> s.startsWith("/content/dam/does-not-exist"); PipelinedTreeStoreStrategy pipelinedStrategy = createStrategy(roStore, pathPredicate, null); File file = pipelinedStrategy.createSortedStoreFile(); assertTrue(file.exists()); assertEquals("[]", readAllEntries(file).toString()); } } @Test public void createFFS_badNumberOfTransformThreads() throws CommitFailedException, IOException { System.setProperty(PipelinedStrategy.OAK_INDEXER_PIPELINED_TRANSFORM_THREADS, "0"); try (MongoTestBackend rwStore = createNodeStore(false)) { PipelineITUtil.createContent(rwStore.documentNodeStore); } try (MongoTestBackend roStore = createNodeStore(true)) { assertThrows("Invalid value for property " + PipelinedStrategy.OAK_INDEXER_PIPELINED_TRANSFORM_THREADS + ": 0. 
Must be > 0", IllegalArgumentException.class, () -> createStrategy(roStore) ); } } @Test public void createFFS_badWorkingMemorySetting() throws CommitFailedException, IOException { System.setProperty(PipelinedStrategy.OAK_INDEXER_PIPELINED_WORKING_MEMORY_MB, "-1"); try (MongoTestBackend rwStore = createNodeStore(false)) { PipelineITUtil.createContent(rwStore.documentNodeStore); } try (MongoTestBackend roStore = createNodeStore(true)) { assertThrows("Invalid value for property " + PipelinedStrategy.OAK_INDEXER_PIPELINED_WORKING_MEMORY_MB + ": -1. Must be >= 0", IllegalArgumentException.class, () -> createStrategy(roStore) ); } } @Test public void createFFS_smallNumberOfDocsPerBatch() throws Exception { System.setProperty(PipelinedStrategy.OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_NUMBER_OF_DOCUMENTS, "2"); Predicate<String> pathPredicate = s -> contentDamPathFilter.filter(s) != PathFilter.Result.EXCLUDE; List<PathFilter> pathFilters = null; testSuccessfulDownload(pathPredicate, pathFilters); } @Test public void createFFS_largeMongoDocuments() throws Exception { System.setProperty(PipelinedStrategy.OAK_INDEXER_PIPELINED_MONGO_DOC_BATCH_MAX_SIZE_MB, "1"); System.setProperty(PipelinedStrategy.OAK_INDEXER_PIPELINED_MONGO_DOC_QUEUE_RESERVED_MEMORY_MB, "32"); Predicate<String> pathPredicate = s -> contentDamPathFilter.filter(s) != PathFilter.Result.EXCLUDE; List<PathFilter> pathFilters = null; String longString = RandomStringUtils.insecure().next((int) (10 * FileUtils.ONE_MB), true, true); try (MongoTestBackend rwStore = createNodeStore(false)) { @NotNull NodeBuilder rootBuilder = rwStore.documentNodeStore.getRoot().builder(); // This property does not fit in the reserved memory, but must still be processed without errors @NotNull NodeBuilder contentDamBuilder = rootBuilder.child("content").child("dam"); contentDamBuilder.child("2021").child("01").setProperty("p1", "v202101"); contentDamBuilder.child("2022").child("01").setProperty("p1", longString); 
contentDamBuilder.child("2023").child("01").setProperty("p1", "v202301"); rwStore.documentNodeStore.merge(rootBuilder, EmptyHook.INSTANCE, CommitInfo.EMPTY); } List<String> expected = List.of( "/|{}", "/content|{}", "/content/dam|{}", "/content/dam/2021|{}", "/content/dam/2021/01|{\"p1\":\"v202101\"}", "/content/dam/2022|{}", "/content/dam/2022/01|{\"p1\":\"" + longString + "\"}", "/content/dam/2023|{}", "/content/dam/2023/01|{\"p1\":\"v202301\"}" ); try (MongoTestBackend roStore = createNodeStore(true)) { PipelinedTreeStoreStrategy pipelinedStrategy = createStrategy(roStore, pathPredicate, pathFilters); File file = pipelinedStrategy.createSortedStoreFile(); assertTrue(file.exists()); assertArrayEquals(expected.toArray(new String[0]), readAllEntriesArray(file)); assertMetrics(statsProvider); } } static String[] readAllEntriesArray(File dir) throws IOException { return readAllEntries(dir).toArray(new String[0]); } static List<String> readAllEntries(File dir) throws IOException { TreeStore treeStore = new TreeStore("test", dir, new NodeStateEntryReader(new MemoryBlobStore()), 1); ArrayList<String> list = new ArrayList<>(); TreeSession session = treeStore.getSession(); for (String k : session.keys()) { String v = session.get(k); if (!v.isEmpty()) { list.add(k + "|" + v); } } treeStore.close(); return list; } @Test public void createFFS_mongoFiltering_custom_excluded_paths_1() throws Exception { System.setProperty(OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true"); System.setProperty(OAK_INDEXER_PIPELINED_MONGO_CUSTOM_EXCLUDED_PATHS, "/etc,/home"); Predicate<String> pathPredicate = s -> true; List<PathFilter> pathFilters = List.of(new PathFilter(List.of("/"), List.of("/content/dam", "/etc", "/home", "/jcr:system"))); testSuccessfulDownload(pathPredicate, pathFilters, List.of( "/|{}", "/content|{}", "/content/dam|{}", "/etc|{}", "/home|{}", "/jcr:system|{}" ), true); } @Test public void createFFS_mongoFiltering_custom_excluded_paths_2() throws Exception { 
System.setProperty(OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true"); System.setProperty(OAK_INDEXER_PIPELINED_MONGO_CUSTOM_EXCLUDED_PATHS, "/etc,/home"); Predicate<String> pathPredicate = s -> true; List<PathFilter> pathFilters = List.of(new PathFilter(List.of("/"), List.of("/content/dam", "/jcr:system"))); testSuccessfulDownload(pathPredicate, pathFilters, List.of( "/|{}", "/content|{}", "/content/dam|{}", "/etc|{}", "/home|{}", "/jcr:system|{}" ), true); } @Test public void createFFS_mongoFiltering_custom_excluded_paths_3() throws Exception { System.setProperty(OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true"); System.setProperty(OAK_INDEXER_PIPELINED_MONGO_CUSTOM_EXCLUDED_PATHS, "/etc,/home,/content/dam,/jcr:system"); Predicate<String> pathPredicate = s -> true; List<PathFilter> pathFilters = List.of(new PathFilter(List.of("/"), List.of())); testSuccessfulDownload(pathPredicate, pathFilters, List.of( "/|{}", "/content|{}", "/content/dam|{}", "/etc|{}", "/home|{}", "/jcr:system|{}" ), true); } @Test public void createFFSNoMatches() throws Exception { System.setProperty(OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true"); System.setProperty(OAK_INDEXER_PIPELINED_MONGO_PARALLEL_DUMP, "true"); System.setProperty(OAK_INDEXER_PIPELINED_MONGO_CUSTOM_EXCLUDED_PATHS, "/etc,/home,/content/dam,/jcr:system"); Predicate<String> pathPredicate = t -> true; List<PathFilter> mongoRegexPathFilters = List.of(new PathFilter(List.of("/doesnotexist"), List.of())); // For an included path of /foo, the / should not be included. But the mongo regex filter is only best effort, // and it will download the parents of all the included paths, even if they are empty. This is not a problem, // because the filter at the transform stage will remove these paths. This test has no filter at the transform // stage (pathPredicate is always true), so the / will be included in the result. 
testSuccessfulDownload(pathPredicate, mongoRegexPathFilters, List.of("/|{}"), true); } @Test(expected = IllegalArgumentException.class) public void createFFS_mongoFiltering_custom_excluded_paths_cannot_exclude_root() throws Exception { System.setProperty(OAK_INDEXER_PIPELINED_MONGO_REGEX_PATH_FILTERING, "true"); System.setProperty(OAK_INDEXER_PIPELINED_MONGO_CUSTOM_EXCLUDED_PATHS, "/etc,/"); Predicate<String> pathPredicate = s -> true; List<PathFilter> pathFilters = List.of(new PathFilter(List.of("/"), List.of())); testSuccessfulDownload(pathPredicate, pathFilters, List.of( "/|{}", "/content|{}", "/content/dam|{}", "/etc|{}", "/home|{}", "/jcr:system|{}" ), true); } @Ignore("This test is for manual execution only. It allocates two byte buffers of 2GB each, which might exceed the memory available in the CI") public void createFFSWithPipelinedStrategy_veryLargeWorkingMemorySetting() throws Exception { System.setProperty(PipelinedStrategy.OAK_INDEXER_PIPELINED_TRANSFORM_THREADS, "1"); System.setProperty(PipelinedStrategy.OAK_INDEXER_PIPELINED_WORKING_MEMORY_MB, "8000"); try (MongoTestBackend rwStore = createNodeStore(false)) { PipelineITUtil.createContent(rwStore.documentNodeStore); } try (MongoTestBackend roStore = createNodeStore(true)) { Predicate<String> pathPredicate = s -> s.startsWith("/content/dam"); PipelinedTreeStoreStrategy pipelinedStrategy = createStrategy(roStore, pathPredicate, null); pipelinedStrategy.createSortedStoreFile(); } } private MongoTestBackend createNodeStore(boolean readOnly) { return PipelineITUtil.createNodeStore(readOnly, connectionFactory, builderProvider); } private PipelinedTreeStoreStrategy createStrategy(MongoTestBackend roStore) { return createStrategy(roStore, s -> true, null); } private PipelinedTreeStoreStrategy createStrategy(MongoTestBackend backend, Predicate<String> pathPredicate, List<PathFilter> mongoRegexPathFilter) { return createStrategy(backend, pathPredicate, mongoRegexPathFilter, 0); } private 
PipelinedTreeStoreStrategy createStrategy(MongoTestBackend backend, Predicate<String> pathPredicate, List<PathFilter> mongoRegexPathFilter, long minModified) { Set<String> preferredPathElements = Set.of(); RevisionVector rootRevision = backend.documentNodeStore.getRoot().getRootRevision(); indexingReporter.setIndexNames(List.of("testIndex")); return new PipelinedTreeStoreStrategy( backend.mongoClientURI, backend.mongoDocumentStore, backend.documentNodeStore, rootRevision, preferredPathElements, new MemoryBlobStore(), sortFolder.getRoot(), Compression.NONE, pathPredicate, mongoRegexPathFilter, null, minModified, statsProvider, indexingReporter); } }
apache/commons-geometry
36,174
commons-geometry-spherical/src/test/java/org/apache/commons/geometry/spherical/oned/RegionBSPTree1STest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.commons.geometry.spherical.oned; import java.util.List; import org.apache.commons.geometry.core.Region; import org.apache.commons.geometry.core.RegionLocation; import org.apache.commons.geometry.core.partitioning.Split; import org.apache.commons.geometry.core.partitioning.SplitLocation; import org.apache.commons.geometry.euclidean.twod.Vector2D; import org.apache.commons.numbers.angle.Angle; import org.apache.commons.numbers.core.Precision; import org.junit.jupiter.api.Assertions; import org.junit.jupiter.api.Test; class RegionBSPTree1STest { private static final double TEST_EPS = 1e-10; private static final Precision.DoubleEquivalence TEST_PRECISION = Precision.doubleEquivalenceOfEpsilon(TEST_EPS); private static final Transform1S HALF_PI_PLUS_AZ = Transform1S.createRotation(Angle.PI_OVER_TWO); private static final Transform1S PI_MINUS_AZ = Transform1S.createNegation().rotate(Math.PI); @Test void testConstructor_default() { // act final RegionBSPTree1S tree = new RegionBSPTree1S(); // assert Assertions.assertFalse(tree.isFull()); Assertions.assertTrue(tree.isEmpty()); Assertions.assertEquals(0, tree.getSize(), TEST_EPS); Assertions.assertEquals(0, tree.getBoundarySize(), 
TEST_EPS); Assertions.assertNull(tree.getCentroid()); } @Test void testConstructor_true() { // act final RegionBSPTree1S tree = new RegionBSPTree1S(true); // assert Assertions.assertTrue(tree.isFull()); Assertions.assertFalse(tree.isEmpty()); Assertions.assertEquals(Angle.TWO_PI, tree.getSize(), TEST_EPS); Assertions.assertEquals(0, tree.getBoundarySize(), TEST_EPS); Assertions.assertNull(tree.getCentroid()); } @Test void testConstructor_false() { // act final RegionBSPTree1S tree = new RegionBSPTree1S(false); // assert Assertions.assertFalse(tree.isFull()); Assertions.assertTrue(tree.isEmpty()); Assertions.assertEquals(0, tree.getSize(), TEST_EPS); Assertions.assertEquals(0, tree.getBoundarySize(), TEST_EPS); Assertions.assertNull(tree.getCentroid()); } @Test void testFull() { // act final RegionBSPTree1S tree = RegionBSPTree1S.full(); // assert Assertions.assertTrue(tree.isFull()); Assertions.assertFalse(tree.isEmpty()); Assertions.assertEquals(Angle.TWO_PI, tree.getSize(), TEST_EPS); Assertions.assertEquals(0, tree.getBoundarySize(), TEST_EPS); Assertions.assertNull(tree.getCentroid()); } @Test void testEmpty() { // act final RegionBSPTree1S tree = RegionBSPTree1S.empty(); // assert Assertions.assertFalse(tree.isFull()); Assertions.assertTrue(tree.isEmpty()); Assertions.assertEquals(0, tree.getSize(), TEST_EPS); Assertions.assertEquals(0, tree.getBoundarySize(), TEST_EPS); Assertions.assertNull(tree.getCentroid()); } @Test void testCopy() { // arrange final RegionBSPTree1S orig = RegionBSPTree1S.fromInterval(AngularInterval.of(0, Math.PI, TEST_PRECISION)); // act final RegionBSPTree1S copy = orig.copy(); // assert Assertions.assertNotSame(orig, copy); orig.setEmpty(); checkSingleInterval(copy, 0, Math.PI); } @Test void testFromInterval_full() { // act final RegionBSPTree1S tree = RegionBSPTree1S.fromInterval(AngularInterval.full()); // assert Assertions.assertTrue(tree.isFull()); } @Test void testFromInterval_nonFull() { for (double theta = 0.0; theta <= 
Angle.TWO_PI; theta += 0.2) { // arrange final double max = theta + Angle.PI_OVER_TWO; // act final RegionBSPTree1S tree = RegionBSPTree1S.fromInterval(AngularInterval.of(theta, max, TEST_PRECISION)); checkSingleInterval(tree, theta, max); Assertions.assertEquals(Angle.PI_OVER_TWO, tree.getSize(), TEST_EPS); Assertions.assertEquals(0, tree.getBoundarySize(), TEST_EPS); Assertions.assertEquals(Angle.Rad.WITHIN_0_AND_2PI.applyAsDouble(theta + (0.25 * Math.PI)), tree.getCentroid().getNormalizedAzimuth(), TEST_EPS); } } @Test void testClassify_full() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.full(); // act/assert for (double az = -Angle.TWO_PI; az <= 2 * Angle.TWO_PI; az += 0.2) { checkClassify(tree, RegionLocation.INSIDE, az); } } @Test void testClassify_empty() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); // act/assert for (double az = -Angle.TWO_PI; az <= 2 * Angle.TWO_PI; az += 0.2) { checkClassify(tree, RegionLocation.OUTSIDE, az); } } @Test void testClassify() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.fromInterval( AngularInterval.of(-Angle.PI_OVER_TWO, Angle.PI_OVER_TWO, TEST_PRECISION)); // act/assert checkClassify(tree, RegionLocation.BOUNDARY, -Angle.PI_OVER_TWO, Angle.PI_OVER_TWO, -Angle.PI_OVER_TWO - Angle.TWO_PI, Angle.PI_OVER_TWO + Angle.TWO_PI); checkClassify(tree, RegionLocation.INSIDE, 0.0, 0.5, -0.5, Angle.TWO_PI, 0.5 + Angle.TWO_PI, -0.5 - Angle.TWO_PI); checkClassify(tree, RegionLocation.OUTSIDE, Math.PI, Math.PI + 0.5, Math.PI - 0.5, Math.PI + Angle.TWO_PI, Math.PI + 0.5 + Angle.TWO_PI, Math.PI - 0.5 + Angle.TWO_PI); } @Test void testToIntervals_full() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.full(); // act final List<AngularInterval> intervals = tree.toIntervals(); // assert Assertions.assertEquals(1, intervals.size()); final AngularInterval interval = intervals.get(0); Assertions.assertTrue(interval.isFull()); } @Test void testToIntervals_empty() { // arrange final 
RegionBSPTree1S tree = RegionBSPTree1S.empty(); // act final List<AngularInterval> intervals = tree.toIntervals(); // assert Assertions.assertEquals(0, intervals.size()); } @Test void testToIntervals_singleCut() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); for (double theta = 0; theta <= Angle.TWO_PI; theta += 0.2) { // act/assert tree.setEmpty(); tree.getRoot().cut(CutAngles.createPositiveFacing(theta, TEST_PRECISION)); checkSingleInterval(tree, 0, theta); tree.setEmpty(); tree.getRoot().cut(CutAngles.createNegativeFacing(theta, TEST_PRECISION)); checkSingleInterval(tree, theta, Angle.TWO_PI); } } @Test void testToIntervals_wrapAround_joinedIntervalsOnPositiveSide() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(0.25 * Math.PI, Angle.PI_OVER_TWO, TEST_PRECISION)); tree.add(AngularInterval.of(1.5 * Math.PI, 0.25 * Math.PI, TEST_PRECISION)); // act final List<AngularInterval> intervals = tree.toIntervals(); // assert Assertions.assertEquals(1, intervals.size()); checkInterval(intervals.get(0), 1.5 * Math.PI, Angle.PI_OVER_TWO); } @Test void testToIntervals_wrapAround_joinedIntervalsOnNegativeSide() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(1.75 * Math.PI, Angle.PI_OVER_TWO, TEST_PRECISION)); tree.add(AngularInterval.of(1.5 * Math.PI, 1.75 * Math.PI, TEST_PRECISION)); // act final List<AngularInterval> intervals = tree.toIntervals(); // assert Assertions.assertEquals(1, intervals.size()); checkInterval(intervals.get(0), 1.5 * Math.PI, Angle.PI_OVER_TWO); } @Test void testToIntervals_multipleIntervals() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(-Angle.PI_OVER_TWO, Angle.PI_OVER_TWO, TEST_PRECISION)); tree.add(AngularInterval.of(Math.PI - 0.5, Math.PI, TEST_PRECISION)); tree.add(AngularInterval.of(Math.PI, Math.PI + 0.5, TEST_PRECISION)); // act final List<AngularInterval> intervals = 
tree.toIntervals(); // assert Assertions.assertEquals(2, intervals.size()); checkInterval(intervals.get(0), Math.PI - 0.5, Math.PI + 0.5); checkInterval(intervals.get(1), -Angle.PI_OVER_TWO, Angle.PI_OVER_TWO); } @Test void testToIntervals_multipleIntervals_complement() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(-Angle.PI_OVER_TWO, Angle.PI_OVER_TWO, TEST_PRECISION)); tree.add(AngularInterval.of(Math.PI - 0.5, Math.PI, TEST_PRECISION)); tree.add(AngularInterval.of(Math.PI, Math.PI + 0.5, TEST_PRECISION)); tree.complement(); // act final List<AngularInterval> intervals = tree.toIntervals(); // assert Assertions.assertEquals(2, intervals.size()); checkInterval(intervals.get(0), Angle.PI_OVER_TWO, Math.PI - 0.5); checkInterval(intervals.get(1), Math.PI + 0.5, -Angle.PI_OVER_TWO); } @Test void testSplit_empty() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); // act/assert Assertions.assertEquals(SplitLocation.NEITHER, tree.split(CutAngles.createPositiveFacing(0, TEST_PRECISION)).getLocation()); Assertions.assertEquals(SplitLocation.NEITHER, tree.split(CutAngles.createNegativeFacing(Angle.PI_OVER_TWO, TEST_PRECISION)).getLocation()); Assertions.assertEquals(SplitLocation.NEITHER, tree.split(CutAngles.createPositiveFacing(Math.PI, TEST_PRECISION)).getLocation()); Assertions.assertEquals(SplitLocation.NEITHER, tree.split(CutAngles.createNegativeFacing(-Angle.PI_OVER_TWO, TEST_PRECISION)).getLocation()); Assertions.assertEquals(SplitLocation.NEITHER, tree.split(CutAngles.createPositiveFacing(Angle.TWO_PI, TEST_PRECISION)).getLocation()); } @Test void testSplit_full() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.full(); // act/assert checkSimpleSplit( tree.split(CutAngles.createPositiveFacing(1e-6, TEST_PRECISION)), AngularInterval.of(0, 1e-6, TEST_PRECISION), AngularInterval.of(1e-6, Angle.TWO_PI, TEST_PRECISION) ); checkSimpleSplit( 
tree.split(CutAngles.createNegativeFacing(Angle.PI_OVER_TWO, TEST_PRECISION)), AngularInterval.of(Angle.PI_OVER_TWO, Angle.TWO_PI, TEST_PRECISION), AngularInterval.of(0, Angle.PI_OVER_TWO, TEST_PRECISION) ); checkSimpleSplit( tree.split(CutAngles.createPositiveFacing(Math.PI, TEST_PRECISION)), AngularInterval.of(0, Math.PI, TEST_PRECISION), AngularInterval.of(Math.PI, Angle.TWO_PI, TEST_PRECISION) ); checkSimpleSplit( tree.split(CutAngles.createNegativeFacing(-Angle.PI_OVER_TWO, TEST_PRECISION)), AngularInterval.of(-Angle.PI_OVER_TWO, Angle.TWO_PI, TEST_PRECISION), AngularInterval.of(0, -Angle.PI_OVER_TWO, TEST_PRECISION) ); checkSimpleSplit( tree.split(CutAngles.createPositiveFacing(Angle.TWO_PI - 1e-6, TEST_PRECISION)), AngularInterval.of(0, Angle.TWO_PI - 1e-6, TEST_PRECISION), AngularInterval.of(Angle.TWO_PI - 1e-6, Angle.TWO_PI, TEST_PRECISION) ); } @Test void testSplit_full_cutEquivalentToZero() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.full(); final AngularInterval twoPi = AngularInterval.of(0, Angle.TWO_PI, TEST_PRECISION); // act/assert checkSimpleSplit( tree.split(CutAngles.createPositiveFacing(0, TEST_PRECISION)), null, twoPi ); checkSimpleSplit( tree.split(CutAngles.createNegativeFacing(0, TEST_PRECISION)), twoPi, null ); checkSimpleSplit( tree.split(CutAngles.createPositiveFacing(Angle.TWO_PI - 1e-18, TEST_PRECISION)), null, twoPi ); checkSimpleSplit( tree.split(CutAngles.createNegativeFacing(Angle.TWO_PI - 1e-18, TEST_PRECISION)), twoPi, null ); } @Test void testSplit_singleInterval() { // arrange final AngularInterval interval = AngularInterval.of(Angle.PI_OVER_TWO, -Angle.PI_OVER_TWO, TEST_PRECISION); final RegionBSPTree1S tree = interval.toTree(); // act checkSimpleSplit( tree.split(CutAngles.createNegativeFacing(0, TEST_PRECISION)), interval, null ); checkSimpleSplit( tree.split(CutAngles.createNegativeFacing(-Angle.TWO_PI, TEST_PRECISION)), interval, null ); checkSimpleSplit( tree.split(CutAngles.createPositiveFacing(Angle.TWO_PI 
+ Angle.PI_OVER_TWO, TEST_PRECISION)), null, interval ); checkSimpleSplit( tree.split(CutAngles.createPositiveFacing(1.5 * Math.PI, TEST_PRECISION)), interval, null ); checkSimpleSplit( tree.split(CutAngles.createNegativeFacing(Math.PI, TEST_PRECISION)), AngularInterval.of(Math.PI, -Angle.PI_OVER_TWO, TEST_PRECISION), AngularInterval.of(Angle.PI_OVER_TWO, Math.PI, TEST_PRECISION) ); } @Test void testSplit_singleIntervalSplitIntoTwoIntervalsOnSameSide() { // arrange final RegionBSPTree1S tree = AngularInterval.of(-Angle.PI_OVER_TWO, Angle.PI_OVER_TWO, TEST_PRECISION).toTree(); final CutAngle cut = CutAngles.createPositiveFacing(0, TEST_PRECISION); // act final Split<RegionBSPTree1S> split = tree.split(cut); // assert Assertions.assertEquals(SplitLocation.PLUS, split.getLocation()); final RegionBSPTree1S minus = split.getMinus(); Assertions.assertNull(minus); final RegionBSPTree1S plus = split.getPlus(); final List<AngularInterval> plusIntervals = plus.toIntervals(); Assertions.assertEquals(1, plusIntervals.size()); checkInterval(plusIntervals.get(0), -Angle.PI_OVER_TWO, Angle.PI_OVER_TWO); } @Test void testSplit_multipleRegions() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(Angle.TWO_PI - 1, Angle.PI_OVER_TWO, TEST_PRECISION)); tree.add(AngularInterval.of(Math.PI, -Angle.PI_OVER_TWO, TEST_PRECISION)); final CutAngle cut = CutAngles.createNegativeFacing(1, TEST_PRECISION); // act final Split<RegionBSPTree1S> split = tree.split(cut); // assert Assertions.assertEquals(SplitLocation.BOTH, split.getLocation()); final RegionBSPTree1S minus = split.getMinus(); final List<AngularInterval> minusIntervals = minus.toIntervals(); Assertions.assertEquals(3, minusIntervals.size()); checkInterval(minusIntervals.get(0), 1, Angle.PI_OVER_TWO); checkInterval(minusIntervals.get(1), Math.PI, -Angle.PI_OVER_TWO); checkInterval(minusIntervals.get(2), Angle.TWO_PI - 1, 0); final RegionBSPTree1S plus = split.getPlus(); final 
List<AngularInterval> plusIntervals = plus.toIntervals(); Assertions.assertEquals(1, plusIntervals.size()); checkInterval(plusIntervals.get(0), 0, 1); } @Test void testSplitDiameter_full() { // arrange final RegionBSPTree1S full = RegionBSPTree1S.full(); final CutAngle splitter = CutAngles.createPositiveFacing(Angle.PI_OVER_TWO, TEST_PRECISION); // act final Split<RegionBSPTree1S> split = full.splitDiameter(splitter); // assert Assertions.assertEquals(SplitLocation.BOTH, split.getLocation()); final RegionBSPTree1S minus = split.getMinus(); final List<AngularInterval> minusIntervals = minus.toIntervals(); Assertions.assertEquals(1, minusIntervals.size()); checkInterval(minusIntervals.get(0), 1.5 * Math.PI, 2.5 * Math.PI); final RegionBSPTree1S plus = split.getPlus(); final List<AngularInterval> plusIntervals = plus.toIntervals(); Assertions.assertEquals(1, plusIntervals.size()); checkInterval(plusIntervals.get(0), Angle.PI_OVER_TWO, 1.5 * Math.PI); } @Test void testSplitDiameter_empty() { // arrange final RegionBSPTree1S empty = RegionBSPTree1S.empty(); final CutAngle splitter = CutAngles.createPositiveFacing(Angle.PI_OVER_TWO, TEST_PRECISION); // act final Split<RegionBSPTree1S> split = empty.splitDiameter(splitter); // assert Assertions.assertEquals(SplitLocation.NEITHER, split.getLocation()); final RegionBSPTree1S minus = split.getMinus(); Assertions.assertNull(minus); final RegionBSPTree1S plus = split.getPlus(); Assertions.assertNull(plus); } @Test void testSplitDiameter_minus_zeroOnMinusSide() { // arrange final RegionBSPTree1S tree = AngularInterval.of(0, 1, TEST_PRECISION).toTree(); final CutAngle splitter = CutAngles.createPositiveFacing(1, TEST_PRECISION); // act final Split<RegionBSPTree1S> split = tree.splitDiameter(splitter); // assert Assertions.assertEquals(SplitLocation.MINUS, split.getLocation()); final RegionBSPTree1S minus = split.getMinus(); final List<AngularInterval> minusIntervals = minus.toIntervals(); Assertions.assertEquals(1, 
minusIntervals.size()); checkInterval(minusIntervals.get(0), 0, 1); final RegionBSPTree1S plus = split.getPlus(); Assertions.assertNull(plus); } @Test void testSplitDiameter_minus_zeroOnPlusSide() { // arrange final RegionBSPTree1S tree = AngularInterval.of(1, 2, TEST_PRECISION).toTree(); final CutAngle splitter = CutAngles.createNegativeFacing(0, TEST_PRECISION); // act final Split<RegionBSPTree1S> split = tree.splitDiameter(splitter); // assert Assertions.assertEquals(SplitLocation.MINUS, split.getLocation()); final RegionBSPTree1S minus = split.getMinus(); final List<AngularInterval> minusIntervals = minus.toIntervals(); Assertions.assertEquals(1, minusIntervals.size()); checkInterval(minusIntervals.get(0), 1, 2); final RegionBSPTree1S plus = split.getPlus(); Assertions.assertNull(plus); } @Test void testSplitDiameter_plus_zeroOnMinusSide() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(1, 1.1, TEST_PRECISION)); tree.add(AngularInterval.of(2, 2.1, TEST_PRECISION)); final CutAngle splitter = CutAngles.createPositiveFacing(1, TEST_PRECISION); // act final Split<RegionBSPTree1S> split = tree.splitDiameter(splitter); // assert Assertions.assertEquals(SplitLocation.PLUS, split.getLocation()); final RegionBSPTree1S minus = split.getMinus(); Assertions.assertNull(minus); final RegionBSPTree1S plus = split.getPlus(); final List<AngularInterval> plusIntervals = plus.toIntervals(); Assertions.assertEquals(2, plusIntervals.size()); checkInterval(plusIntervals.get(0), 1, 1.1); checkInterval(plusIntervals.get(1), 2, 2.1); } @Test void testSplitDiameter_plus_zeroOnPlusSide() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(1, 1.1, TEST_PRECISION)); tree.add(AngularInterval.of(2, 2.1, TEST_PRECISION)); final CutAngle splitter = CutAngles.createNegativeFacing(Math.PI - 1, TEST_PRECISION); // act final Split<RegionBSPTree1S> split = tree.splitDiameter(splitter); // assert 
Assertions.assertEquals(SplitLocation.PLUS, split.getLocation()); final RegionBSPTree1S minus = split.getMinus(); Assertions.assertNull(minus); final RegionBSPTree1S plus = split.getPlus(); final List<AngularInterval> plusIntervals = plus.toIntervals(); Assertions.assertEquals(2, plusIntervals.size()); checkInterval(plusIntervals.get(0), 1, 1.1); checkInterval(plusIntervals.get(1), 2, 2.1); } @Test void testSplitDiameter_both_zeroOnMinusSide() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(1, 1.1, TEST_PRECISION)); tree.add(AngularInterval.of(2, 3, TEST_PRECISION)); final CutAngle splitter = CutAngles.createPositiveFacing(2.5, TEST_PRECISION); // act final Split<RegionBSPTree1S> split = tree.splitDiameter(splitter); // assert Assertions.assertEquals(SplitLocation.BOTH, split.getLocation()); final RegionBSPTree1S minus = split.getMinus(); final List<AngularInterval> plusIntervals = minus.toIntervals(); Assertions.assertEquals(2, plusIntervals.size()); checkInterval(plusIntervals.get(0), 1, 1.1); checkInterval(plusIntervals.get(1), 2, 2.5); final RegionBSPTree1S plus = split.getPlus(); final List<AngularInterval> minusIntervals = plus.toIntervals(); Assertions.assertEquals(1, minusIntervals.size()); checkInterval(minusIntervals.get(0), 2.5, 3); } @Test void testSplitDiameter_both_zeroOnPlusSide() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(1, 1.1, TEST_PRECISION)); tree.add(AngularInterval.of(2, 3, TEST_PRECISION)); final CutAngle splitter = CutAngles.createNegativeFacing(2.5, TEST_PRECISION); // act final Split<RegionBSPTree1S> split = tree.splitDiameter(splitter); // assert Assertions.assertEquals(SplitLocation.BOTH, split.getLocation()); final RegionBSPTree1S minus = split.getMinus(); final List<AngularInterval> minusIntervals = minus.toIntervals(); Assertions.assertEquals(1, minusIntervals.size()); checkInterval(minusIntervals.get(0), 2.5, 3); final RegionBSPTree1S 
plus = split.getPlus(); final List<AngularInterval> plusIntervals = plus.toIntervals(); Assertions.assertEquals(2, plusIntervals.size()); checkInterval(plusIntervals.get(0), 1, 1.1); checkInterval(plusIntervals.get(1), 2, 2.5); } @Test void testRegionProperties_singleInterval_wrapsZero() { // arrange final RegionBSPTree1S tree = AngularInterval.of(-Angle.PI_OVER_TWO, Math.PI, TEST_PRECISION).toTree(); // act/assert Assertions.assertEquals(1.5 * Math.PI, tree.getSize(), TEST_EPS); Assertions.assertEquals(0, tree.getBoundarySize(), TEST_EPS); Assertions.assertEquals(0.25 * Math.PI, tree.getCentroid().getAzimuth(), TEST_EPS); } @Test void testRegionProperties_singleInterval_doesNotWrap() { // arrange final RegionBSPTree1S tree = AngularInterval.of(Angle.PI_OVER_TWO, Angle.TWO_PI, TEST_PRECISION).toTree(); // act/assert Assertions.assertEquals(1.5 * Math.PI, tree.getSize(), TEST_EPS); Assertions.assertEquals(0, tree.getBoundarySize(), TEST_EPS); Assertions.assertEquals(1.25 * Math.PI, tree.getCentroid().getAzimuth(), TEST_EPS); } @Test void testRegionProperties_multipleIntervals_sameSize() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(0, 0.1, TEST_PRECISION)); tree.add(AngularInterval.of(0.2, 0.3, TEST_PRECISION)); // act/assert Assertions.assertEquals(0.2, tree.getSize(), TEST_EPS); Assertions.assertEquals(0, tree.getBoundarySize(), TEST_EPS); Assertions.assertEquals(0.15, tree.getCentroid().getAzimuth(), TEST_EPS); } @Test void testRegionProperties_multipleIntervals_differentSizes() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(0, 0.2, TEST_PRECISION)); tree.add(AngularInterval.of(0.3, 0.7, TEST_PRECISION)); // act/assert Assertions.assertEquals(0.6, tree.getSize(), TEST_EPS); Assertions.assertEquals(0, tree.getBoundarySize(), TEST_EPS); final Vector2D centroidVector = Point1S.of(0.1).getVector().withNorm(0.2) .add(Point1S.of(0.5).getVector().withNorm(0.4)); 
Assertions.assertEquals(Point1S.from(centroidVector).getAzimuth(), tree.getCentroid().getAzimuth(), TEST_EPS); } @Test void testRegionProperties_equalAndOppositeIntervals() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(-1, 1, TEST_PRECISION)); tree.add(AngularInterval.of(Math.PI - 1, Math.PI + 1, TEST_PRECISION)); // act/assert Assertions.assertEquals(4, tree.getSize(), TEST_EPS); Assertions.assertEquals(0, tree.getBoundarySize(), TEST_EPS); Assertions.assertNull(tree.getCentroid()); // no unique centroid exists } @Test void testTransform_fullAndEmpty() { // arrange final RegionBSPTree1S full = RegionBSPTree1S.full(); final RegionBSPTree1S empty = RegionBSPTree1S.empty(); // act full.transform(PI_MINUS_AZ); empty.transform(HALF_PI_PLUS_AZ); // assert Assertions.assertTrue(full.isFull()); Assertions.assertFalse(full.isEmpty()); Assertions.assertFalse(empty.isFull()); Assertions.assertTrue(empty.isEmpty()); } @Test void testTransform_halfPiPlusAz() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(-1, 1, TEST_PRECISION)); tree.add(AngularInterval.of(2, 3, TEST_PRECISION)); // act tree.transform(HALF_PI_PLUS_AZ); // assert Assertions.assertEquals(3, tree.getSize(), TEST_EPS); final List<AngularInterval> intervals = tree.toIntervals(); Assertions.assertEquals(2, intervals.size()); checkInterval(intervals.get(0), Angle.PI_OVER_TWO - 1, Angle.PI_OVER_TWO + 1); checkInterval(intervals.get(1), Angle.PI_OVER_TWO + 2, Angle.PI_OVER_TWO + 3); } @Test void testTransform_piMinusAz() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(-1, 1, TEST_PRECISION)); tree.add(AngularInterval.of(2, 3, TEST_PRECISION)); // act tree.transform(PI_MINUS_AZ); // assert Assertions.assertEquals(3, tree.getSize(), TEST_EPS); final List<AngularInterval> intervals = tree.toIntervals(); Assertions.assertEquals(2, intervals.size()); checkInterval(intervals.get(0), 
Math.PI - 3, Math.PI - 2); checkInterval(intervals.get(1), Math.PI - 1, Math.PI + 1); } @Test void testProject_fullAndEmpty() { // arrange final RegionBSPTree1S full = RegionBSPTree1S.full(); final RegionBSPTree1S empty = RegionBSPTree1S.empty(); // act/assert Assertions.assertNull(full.project(Point1S.ZERO)); Assertions.assertNull(full.project(Point1S.PI)); Assertions.assertNull(empty.project(Point1S.ZERO)); Assertions.assertNull(empty.project(Point1S.PI)); } @Test void testProject_withIntervals() { // arrange final RegionBSPTree1S tree = RegionBSPTree1S.empty(); tree.add(AngularInterval.of(-Angle.PI_OVER_TWO, Angle.PI_OVER_TWO, TEST_PRECISION)); tree.add(AngularInterval.of(Math.PI - 1, Math.PI + 1, TEST_PRECISION)); // act/assert Assertions.assertEquals(-Angle.PI_OVER_TWO, tree.project(Point1S.of(-Angle.PI_OVER_TWO - 0.1)).getAzimuth(), TEST_EPS); Assertions.assertEquals(-Angle.PI_OVER_TWO, tree.project(Point1S.of(-Angle.PI_OVER_TWO)).getAzimuth(), TEST_EPS); Assertions.assertEquals(-Angle.PI_OVER_TWO, tree.project(Point1S.of(-Angle.PI_OVER_TWO + 0.1)).getAzimuth(), TEST_EPS); Assertions.assertEquals(-Angle.PI_OVER_TWO, tree.project(Point1S.of(-0.1)).getAzimuth(), TEST_EPS); Assertions.assertEquals(Angle.PI_OVER_TWO, tree.project(Point1S.ZERO).getAzimuth(), TEST_EPS); Assertions.assertEquals(Angle.PI_OVER_TWO, tree.project(Point1S.of(0.1)).getAzimuth(), TEST_EPS); Assertions.assertEquals(Math.PI - 1, tree.project(Point1S.of(Math.PI - 0.5)).getAzimuth(), TEST_EPS); Assertions.assertEquals(Math.PI + 1, tree.project(Point1S.of(Math.PI + 0.5)).getAzimuth(), TEST_EPS); } @Test void testProject_equidistant() { // arrange final RegionBSPTree1S tree = AngularInterval.of(1, 2, TEST_PRECISION).toTree(); final RegionBSPTree1S treeComplement = tree.copy(); treeComplement.complement(); // act/assert Assertions.assertEquals(1, tree.project(Point1S.of(1.5)).getAzimuth(), TEST_EPS); Assertions.assertEquals(1, treeComplement.project(Point1S.of(1.5)).getAzimuth(), TEST_EPS); } 
@Test void testProject_intervalAroundZero_closerOnMinSide() { // arrange final double start = -1; final double end = 0.5; final RegionBSPTree1S tree = AngularInterval.of(start, end, TEST_PRECISION).toTree(); // act/assert Assertions.assertEquals(end, tree.project(Point1S.of(-1.5 * Math.PI)).getAzimuth(), TEST_EPS); Assertions.assertEquals(start, tree.project(Point1S.of(-Math.PI)).getAzimuth(), TEST_EPS); Assertions.assertEquals(start, tree.project(Point1S.of(-0.5 * Math.PI)).getAzimuth(), TEST_EPS); Assertions.assertEquals(start, tree.project(Point1S.of(-1)).getAzimuth(), TEST_EPS); Assertions.assertEquals(start, tree.project(Point1S.of(-0.5)).getAzimuth(), TEST_EPS); Assertions.assertEquals(end, tree.project(Point1S.of(-0.25)).getAzimuth(), TEST_EPS); Assertions.assertEquals(end, tree.project(Point1S.of(-0.1)).getAzimuth(), TEST_EPS); Assertions.assertEquals(end, tree.project(Point1S.ZERO).getAzimuth(), TEST_EPS); Assertions.assertEquals(end, tree.project(Point1S.of(0.1)).getAzimuth(), TEST_EPS); Assertions.assertEquals(end, tree.project(Point1S.of(0.25)).getAzimuth(), TEST_EPS); Assertions.assertEquals(end, tree.project(Point1S.of(0.5)).getAzimuth(), TEST_EPS); Assertions.assertEquals(end, tree.project(Point1S.of(0.75)).getAzimuth(), TEST_EPS); } @Test void testProject_intervalAroundZero_closerOnMaxSide() { // arrange final double start = -0.5; final double end = 1; final RegionBSPTree1S tree = AngularInterval.of(start, end, TEST_PRECISION).toTree(); // act/assert Assertions.assertEquals(end, tree.project(Point1S.of(-1.5 * Math.PI)).getAzimuth(), TEST_EPS); Assertions.assertEquals(end, tree.project(Point1S.of(-Math.PI)).getAzimuth(), TEST_EPS); Assertions.assertEquals(start, tree.project(Point1S.of(-0.5 * Math.PI)).getAzimuth(), TEST_EPS); Assertions.assertEquals(start, tree.project(Point1S.of(-1)).getAzimuth(), TEST_EPS); Assertions.assertEquals(start, tree.project(Point1S.of(-0.5)).getAzimuth(), TEST_EPS); Assertions.assertEquals(start, 
tree.project(Point1S.of(-0.25)).getAzimuth(), TEST_EPS); Assertions.assertEquals(start, tree.project(Point1S.of(-0.1)).getAzimuth(), TEST_EPS); Assertions.assertEquals(start, tree.project(Point1S.ZERO).getAzimuth(), TEST_EPS); Assertions.assertEquals(start, tree.project(Point1S.of(0.1)).getAzimuth(), TEST_EPS); Assertions.assertEquals(end, tree.project(Point1S.of(0.25)).getAzimuth(), TEST_EPS); Assertions.assertEquals(end, tree.project(Point1S.of(0.5)).getAzimuth(), TEST_EPS); Assertions.assertEquals(end, tree.project(Point1S.of(0.75)).getAzimuth(), TEST_EPS); } private static void checkSimpleSplit(final Split<RegionBSPTree1S> split, final AngularInterval minusInterval, final AngularInterval plusInterval) { final RegionBSPTree1S minus = split.getMinus(); if (minusInterval != null) { Assertions.assertNotNull(minus, "Expected minus region to not be null"); checkSingleInterval(minus, minusInterval.getMin(), minusInterval.getMax()); } else { Assertions.assertNull(minus, "Expected minus region to be null"); } final RegionBSPTree1S plus = split.getPlus(); if (plusInterval != null) { Assertions.assertNotNull(plus, "Expected plus region to not be null"); checkSingleInterval(plus, plusInterval.getMin(), plusInterval.getMax()); } else { Assertions.assertNull(plus, "Expected plus region to be null"); } } private static void checkSingleInterval(final RegionBSPTree1S tree, final double min, final double max) { final List<AngularInterval> intervals = tree.toIntervals(); Assertions.assertEquals(1, intervals.size(), "Expected a single interval in the tree"); checkInterval(intervals.get(0), min, max); } private static void checkInterval(final AngularInterval interval, final double min, final double max) { final double normalizedMin = Angle.Rad.WITHIN_0_AND_2PI.applyAsDouble(min); final double normalizedMax = Angle.Rad.WITHIN_0_AND_2PI.applyAsDouble(max); if (TEST_PRECISION.eq(normalizedMin, normalizedMax)) { Assertions.assertTrue(interval.isFull()); } else { 
Assertions.assertEquals(normalizedMin, interval.getMinBoundary().getPoint().getNormalizedAzimuth(), TEST_EPS); Assertions.assertEquals(normalizedMax, interval.getMaxBoundary().getPoint().getNormalizedAzimuth(), TEST_EPS); } } private static void checkClassify(final Region<Point1S> region, final RegionLocation loc, final double... pts) { for (final double pt : pts) { Assertions.assertEquals(loc, region.classify(Point1S.of(pt)), "Unexpected location for point " + pt); } } }
googleapis/google-api-java-client-services
36,069
clients/google-api-services-compute/beta/1.28.0/com/google/api/services/compute/model/Instance.java
/* * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except * in compliance with the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ /* * This code was generated by https://github.com/googleapis/google-api-java-client-services/ * Modify at your own risk. */ package com.google.api.services.compute.model; /** * Represents an Instance resource. * * An instance is a virtual machine that is hosted on Google Cloud Platform. For more information, * read Virtual Machine Instances. (== resource_for beta.instances ==) (== resource_for v1.instances * ==) * * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see: * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a> * </p> * * @author Google, Inc. */ @SuppressWarnings("javadoc") public final class Instance extends com.google.api.client.json.GenericJson { /** * Allows this instance to send and receive packets with non-matching destination or source IPs. * This is required if you plan to use this instance to forward routes. For more information, see * Enabling IP Forwarding. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean canIpForward; /** * [Output Only] The CPU platform used by this instance. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.lang.String cpuPlatform; /** * [Output Only] Creation timestamp in RFC3339 text format. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String creationTimestamp; /** * Whether the resource should be protected against deletion. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean deletionProtection; /** * An optional description of this resource. Provide this property when you create the resource. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String description; /** * Array of disks associated with this instance. Persistent disks must be created before you can * assign them. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<AttachedDisk> disks; static { // hack to force ProGuard to consider AttachedDisk used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(AttachedDisk.class); } /** * Enables display device for the instance. * The value may be {@code null}. */ @com.google.api.client.util.Key private DisplayDevice displayDevice; /** * Specifies whether the disks restored from source snapshots or source machine image should erase * Windows specific VSS signature. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean eraseWindowsVssSignature; /** * A list of the type and count of accelerator cards attached to the instance. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.util.List<AcceleratorConfig> guestAccelerators; static { // hack to force ProGuard to consider AcceleratorConfig used, since otherwise it would be stripped out // see https://github.com/google/google-api-java-client/issues/543 com.google.api.client.util.Data.nullOf(AcceleratorConfig.class); } /** * Specifies the hostname of the instance. The specified hostname must be RFC1035 compliant. If * hostname is not specified, the default hostname is [INSTANCE_NAME].c.[PROJECT_ID].internal when * using the global DNS, and [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using zonal DNS. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String hostname; /** * [Output Only] The unique identifier for the resource. This identifier is defined by the server. * The value may be {@code null}. */ @com.google.api.client.util.Key @com.google.api.client.json.JsonString private java.math.BigInteger id; /** * [Output Only] Type of the resource. Always compute#instance for instances. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String kind; /** * A fingerprint for this request, which is essentially a hash of the label's contents and used * for optimistic locking. The fingerprint is initially generated by Compute Engine and changes * after every request to modify or update labels. You must always provide an up-to-date * fingerprint hash in order to update or change labels. * * To see the latest fingerprint, make get() request to the instance. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String labelFingerprint; /** * Labels to apply to this instance. These can be later modified by the setLabels method. * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private java.util.Map<String, java.lang.String> labels; /** * Full or partial URL of the machine type resource to use for this instance, in the format: * zones/zone/machineTypes/machine-type. This is provided by the client when the instance is * created. For example, the following is a valid partial url to a predefined machine type: zones * /us-central1-f/machineTypes/n1-standard-1 * * To create a custom machine type, provide a URL to a machine type in the following format, where * CPUS is 1 or an even number up to 32 (2, 4, 6, ... 24, etc), and MEMORY is the total memory for * this instance. Memory must be a multiple of 256 MB and must be supplied in MB (e.g. 5 GB of * memory is 5120 MB): zones/zone/machineTypes/custom-CPUS-MEMORY * * For example: zones/us-central1-f/machineTypes/custom-4-5120 * * For a full list of restrictions, read the Specifications for custom machine types. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String machineType; /** * The metadata key/value pairs assigned to this instance. This includes custom metadata and * predefined keys. * The value may be {@code null}. */ @com.google.api.client.util.Key private Metadata metadata; /** * Specifies a minimum CPU platform for the VM instance. Applicable values are the friendly names * of CPU platforms, such as minCpuPlatform: "Intel Haswell" or minCpuPlatform: "Intel Sandy * Bridge". * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String minCpuPlatform; /** * The name of the resource, provided by the client when initially creating the resource. The * resource name must be 1-63 characters long, and comply with RFC1035. 
Specifically, the name * must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` * which means the first character must be a lowercase letter, and all following characters must * be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String name; /** * An array of network configurations for this instance. These specify how interfaces are * configured to interact with other network services, such as connecting to the internet. * Multiple interfaces are supported per instance. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<NetworkInterface> networkInterfaces; /** * Specifies the reservations that this instance can consume from. * The value may be {@code null}. */ @com.google.api.client.util.Key private ReservationAffinity reservationAffinity; /** * Sets the scheduling options for this instance. * The value may be {@code null}. */ @com.google.api.client.util.Key private Scheduling scheduling; /** * [Output Only] Server-defined URL for this resource. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String selfLink; /** * A list of service accounts, with their specified scopes, authorized for this instance. Only one * service account per VM instance is supported. * * Service accounts generate access tokens that can be accessed through the metadata server and * used to authenticate applications on the instance. See Service Accounts for more information. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.util.List<ServiceAccount> serviceAccounts; /** * The value may be {@code null}. */ @com.google.api.client.util.Key private ShieldedInstanceConfig shieldedInstanceConfig; /** * The value may be {@code null}. 
*/ @com.google.api.client.util.Key private ShieldedInstanceIntegrityPolicy shieldedInstanceIntegrityPolicy; /** * The value may be {@code null}. */ @com.google.api.client.util.Key private ShieldedVmConfig shieldedVmConfig; /** * The value may be {@code null}. */ @com.google.api.client.util.Key private ShieldedVmIntegrityPolicy shieldedVmIntegrityPolicy; /** * Source machine image * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String sourceMachineImage; /** * Source GMI encryption key when creating an instance from GMI. * The value may be {@code null}. */ @com.google.api.client.util.Key private CustomerEncryptionKey sourceMachineImageEncryptionKey; /** * [Output Only] Whether a VM has been restricted for start because Compute Engine has detected * suspicious activity. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.Boolean startRestricted; /** * [Output Only] The status of the instance. One of the following values: PROVISIONING, STAGING, * RUNNING, STOPPING, STOPPED, SUSPENDING, SUSPENDED, and TERMINATED. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String status; /** * [Output Only] An optional, human-readable explanation of the status. * The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String statusMessage; /** * Tags to apply to this instance. Tags are used to identify valid sources or targets for network * firewalls and are specified by the client during instance creation. The tags can be later * modified by the setTags method. Each tag within the list must comply with RFC1035. Multiple * tags can be specified via the 'tags.items' field. * The value may be {@code null}. */ @com.google.api.client.util.Key private Tags tags; /** * [Output Only] URL of the zone where the instance resides. You must specify this field as part * of the HTTP request URL. It is not settable as a field in the request body. 
* The value may be {@code null}. */ @com.google.api.client.util.Key private java.lang.String zone; /** * Allows this instance to send and receive packets with non-matching destination or source IPs. * This is required if you plan to use this instance to forward routes. For more information, see * Enabling IP Forwarding. * @return value or {@code null} for none */ public java.lang.Boolean getCanIpForward() { return canIpForward; } /** * Allows this instance to send and receive packets with non-matching destination or source IPs. * This is required if you plan to use this instance to forward routes. For more information, see * Enabling IP Forwarding. * @param canIpForward canIpForward or {@code null} for none */ public Instance setCanIpForward(java.lang.Boolean canIpForward) { this.canIpForward = canIpForward; return this; } /** * [Output Only] The CPU platform used by this instance. * @return value or {@code null} for none */ public java.lang.String getCpuPlatform() { return cpuPlatform; } /** * [Output Only] The CPU platform used by this instance. * @param cpuPlatform cpuPlatform or {@code null} for none */ public Instance setCpuPlatform(java.lang.String cpuPlatform) { this.cpuPlatform = cpuPlatform; return this; } /** * [Output Only] Creation timestamp in RFC3339 text format. * @return value or {@code null} for none */ public java.lang.String getCreationTimestamp() { return creationTimestamp; } /** * [Output Only] Creation timestamp in RFC3339 text format. * @param creationTimestamp creationTimestamp or {@code null} for none */ public Instance setCreationTimestamp(java.lang.String creationTimestamp) { this.creationTimestamp = creationTimestamp; return this; } /** * Whether the resource should be protected against deletion. * @return value or {@code null} for none */ public java.lang.Boolean getDeletionProtection() { return deletionProtection; } /** * Whether the resource should be protected against deletion. 
* @param deletionProtection deletionProtection or {@code null} for none */ public Instance setDeletionProtection(java.lang.Boolean deletionProtection) { this.deletionProtection = deletionProtection; return this; } /** * An optional description of this resource. Provide this property when you create the resource. * @return value or {@code null} for none */ public java.lang.String getDescription() { return description; } /** * An optional description of this resource. Provide this property when you create the resource. * @param description description or {@code null} for none */ public Instance setDescription(java.lang.String description) { this.description = description; return this; } /** * Array of disks associated with this instance. Persistent disks must be created before you can * assign them. * @return value or {@code null} for none */ public java.util.List<AttachedDisk> getDisks() { return disks; } /** * Array of disks associated with this instance. Persistent disks must be created before you can * assign them. * @param disks disks or {@code null} for none */ public Instance setDisks(java.util.List<AttachedDisk> disks) { this.disks = disks; return this; } /** * Enables display device for the instance. * @return value or {@code null} for none */ public DisplayDevice getDisplayDevice() { return displayDevice; } /** * Enables display device for the instance. * @param displayDevice displayDevice or {@code null} for none */ public Instance setDisplayDevice(DisplayDevice displayDevice) { this.displayDevice = displayDevice; return this; } /** * Specifies whether the disks restored from source snapshots or source machine image should erase * Windows specific VSS signature. * @return value or {@code null} for none */ public java.lang.Boolean getEraseWindowsVssSignature() { return eraseWindowsVssSignature; } /** * Specifies whether the disks restored from source snapshots or source machine image should erase * Windows specific VSS signature. 
* @param eraseWindowsVssSignature eraseWindowsVssSignature or {@code null} for none */ public Instance setEraseWindowsVssSignature(java.lang.Boolean eraseWindowsVssSignature) { this.eraseWindowsVssSignature = eraseWindowsVssSignature; return this; } /** * A list of the type and count of accelerator cards attached to the instance. * @return value or {@code null} for none */ public java.util.List<AcceleratorConfig> getGuestAccelerators() { return guestAccelerators; } /** * A list of the type and count of accelerator cards attached to the instance. * @param guestAccelerators guestAccelerators or {@code null} for none */ public Instance setGuestAccelerators(java.util.List<AcceleratorConfig> guestAccelerators) { this.guestAccelerators = guestAccelerators; return this; } /** * Specifies the hostname of the instance. The specified hostname must be RFC1035 compliant. If * hostname is not specified, the default hostname is [INSTANCE_NAME].c.[PROJECT_ID].internal when * using the global DNS, and [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using zonal DNS. * @return value or {@code null} for none */ public java.lang.String getHostname() { return hostname; } /** * Specifies the hostname of the instance. The specified hostname must be RFC1035 compliant. If * hostname is not specified, the default hostname is [INSTANCE_NAME].c.[PROJECT_ID].internal when * using the global DNS, and [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using zonal DNS. * @param hostname hostname or {@code null} for none */ public Instance setHostname(java.lang.String hostname) { this.hostname = hostname; return this; } /** * [Output Only] The unique identifier for the resource. This identifier is defined by the server. * @return value or {@code null} for none */ public java.math.BigInteger getId() { return id; } /** * [Output Only] The unique identifier for the resource. This identifier is defined by the server. 
* @param id id or {@code null} for none */ public Instance setId(java.math.BigInteger id) { this.id = id; return this; } /** * [Output Only] Type of the resource. Always compute#instance for instances. * @return value or {@code null} for none */ public java.lang.String getKind() { return kind; } /** * [Output Only] Type of the resource. Always compute#instance for instances. * @param kind kind or {@code null} for none */ public Instance setKind(java.lang.String kind) { this.kind = kind; return this; } /** * A fingerprint for this request, which is essentially a hash of the label's contents and used * for optimistic locking. The fingerprint is initially generated by Compute Engine and changes * after every request to modify or update labels. You must always provide an up-to-date * fingerprint hash in order to update or change labels. * * To see the latest fingerprint, make get() request to the instance. * @see #decodeLabelFingerprint() * @return value or {@code null} for none */ public java.lang.String getLabelFingerprint() { return labelFingerprint; } /** * A fingerprint for this request, which is essentially a hash of the label's contents and used * for optimistic locking. The fingerprint is initially generated by Compute Engine and changes * after every request to modify or update labels. You must always provide an up-to-date * fingerprint hash in order to update or change labels. * * To see the latest fingerprint, make get() request to the instance. * @see #getLabelFingerprint() * @return Base64 decoded value or {@code null} for none * * @since 1.14 */ public byte[] decodeLabelFingerprint() { return com.google.api.client.util.Base64.decodeBase64(labelFingerprint); } /** * A fingerprint for this request, which is essentially a hash of the label's contents and used * for optimistic locking. The fingerprint is initially generated by Compute Engine and changes * after every request to modify or update labels. 
You must always provide an up-to-date * fingerprint hash in order to update or change labels. * * To see the latest fingerprint, make get() request to the instance. * @see #encodeLabelFingerprint() * @param labelFingerprint labelFingerprint or {@code null} for none */ public Instance setLabelFingerprint(java.lang.String labelFingerprint) { this.labelFingerprint = labelFingerprint; return this; } /** * A fingerprint for this request, which is essentially a hash of the label's contents and used * for optimistic locking. The fingerprint is initially generated by Compute Engine and changes * after every request to modify or update labels. You must always provide an up-to-date * fingerprint hash in order to update or change labels. * * To see the latest fingerprint, make get() request to the instance. * @see #setLabelFingerprint() * * <p> * The value is encoded Base64 or {@code null} for none. * </p> * * @since 1.14 */ public Instance encodeLabelFingerprint(byte[] labelFingerprint) { this.labelFingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(labelFingerprint); return this; } /** * Labels to apply to this instance. These can be later modified by the setLabels method. * @return value or {@code null} for none */ public java.util.Map<String, java.lang.String> getLabels() { return labels; } /** * Labels to apply to this instance. These can be later modified by the setLabels method. * @param labels labels or {@code null} for none */ public Instance setLabels(java.util.Map<String, java.lang.String> labels) { this.labels = labels; return this; } /** * Full or partial URL of the machine type resource to use for this instance, in the format: * zones/zone/machineTypes/machine-type. This is provided by the client when the instance is * created. 
For example, the following is a valid partial url to a predefined machine type: zones * /us-central1-f/machineTypes/n1-standard-1 * * To create a custom machine type, provide a URL to a machine type in the following format, where * CPUS is 1 or an even number up to 32 (2, 4, 6, ... 24, etc), and MEMORY is the total memory for * this instance. Memory must be a multiple of 256 MB and must be supplied in MB (e.g. 5 GB of * memory is 5120 MB): zones/zone/machineTypes/custom-CPUS-MEMORY * * For example: zones/us-central1-f/machineTypes/custom-4-5120 * * For a full list of restrictions, read the Specifications for custom machine types. * @return value or {@code null} for none */ public java.lang.String getMachineType() { return machineType; } /** * Full or partial URL of the machine type resource to use for this instance, in the format: * zones/zone/machineTypes/machine-type. This is provided by the client when the instance is * created. For example, the following is a valid partial url to a predefined machine type: zones * /us-central1-f/machineTypes/n1-standard-1 * * To create a custom machine type, provide a URL to a machine type in the following format, where * CPUS is 1 or an even number up to 32 (2, 4, 6, ... 24, etc), and MEMORY is the total memory for * this instance. Memory must be a multiple of 256 MB and must be supplied in MB (e.g. 5 GB of * memory is 5120 MB): zones/zone/machineTypes/custom-CPUS-MEMORY * * For example: zones/us-central1-f/machineTypes/custom-4-5120 * * For a full list of restrictions, read the Specifications for custom machine types. * @param machineType machineType or {@code null} for none */ public Instance setMachineType(java.lang.String machineType) { this.machineType = machineType; return this; } /** * The metadata key/value pairs assigned to this instance. This includes custom metadata and * predefined keys. 
* @return value or {@code null} for none */ public Metadata getMetadata() { return metadata; } /** * The metadata key/value pairs assigned to this instance. This includes custom metadata and * predefined keys. * @param metadata metadata or {@code null} for none */ public Instance setMetadata(Metadata metadata) { this.metadata = metadata; return this; } /** * Specifies a minimum CPU platform for the VM instance. Applicable values are the friendly names * of CPU platforms, such as minCpuPlatform: "Intel Haswell" or minCpuPlatform: "Intel Sandy * Bridge". * @return value or {@code null} for none */ public java.lang.String getMinCpuPlatform() { return minCpuPlatform; } /** * Specifies a minimum CPU platform for the VM instance. Applicable values are the friendly names * of CPU platforms, such as minCpuPlatform: "Intel Haswell" or minCpuPlatform: "Intel Sandy * Bridge". * @param minCpuPlatform minCpuPlatform or {@code null} for none */ public Instance setMinCpuPlatform(java.lang.String minCpuPlatform) { this.minCpuPlatform = minCpuPlatform; return this; } /** * The name of the resource, provided by the client when initially creating the resource. The * resource name must be 1-63 characters long, and comply with RFC1035. Specifically, the name * must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` * which means the first character must be a lowercase letter, and all following characters must * be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. * @return value or {@code null} for none */ public java.lang.String getName() { return name; } /** * The name of the resource, provided by the client when initially creating the resource. The * resource name must be 1-63 characters long, and comply with RFC1035. 
Specifically, the name * must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?` * which means the first character must be a lowercase letter, and all following characters must * be a dash, lowercase letter, or digit, except the last character, which cannot be a dash. * @param name name or {@code null} for none */ public Instance setName(java.lang.String name) { this.name = name; return this; } /** * An array of network configurations for this instance. These specify how interfaces are * configured to interact with other network services, such as connecting to the internet. * Multiple interfaces are supported per instance. * @return value or {@code null} for none */ public java.util.List<NetworkInterface> getNetworkInterfaces() { return networkInterfaces; } /** * An array of network configurations for this instance. These specify how interfaces are * configured to interact with other network services, such as connecting to the internet. * Multiple interfaces are supported per instance. * @param networkInterfaces networkInterfaces or {@code null} for none */ public Instance setNetworkInterfaces(java.util.List<NetworkInterface> networkInterfaces) { this.networkInterfaces = networkInterfaces; return this; } /** * Specifies the reservations that this instance can consume from. * @return value or {@code null} for none */ public ReservationAffinity getReservationAffinity() { return reservationAffinity; } /** * Specifies the reservations that this instance can consume from. * @param reservationAffinity reservationAffinity or {@code null} for none */ public Instance setReservationAffinity(ReservationAffinity reservationAffinity) { this.reservationAffinity = reservationAffinity; return this; } /** * Sets the scheduling options for this instance. * @return value or {@code null} for none */ public Scheduling getScheduling() { return scheduling; } /** * Sets the scheduling options for this instance. 
* @param scheduling scheduling or {@code null} for none */ public Instance setScheduling(Scheduling scheduling) { this.scheduling = scheduling; return this; } /** * [Output Only] Server-defined URL for this resource. * @return value or {@code null} for none */ public java.lang.String getSelfLink() { return selfLink; } /** * [Output Only] Server-defined URL for this resource. * @param selfLink selfLink or {@code null} for none */ public Instance setSelfLink(java.lang.String selfLink) { this.selfLink = selfLink; return this; } /** * A list of service accounts, with their specified scopes, authorized for this instance. Only one * service account per VM instance is supported. * * Service accounts generate access tokens that can be accessed through the metadata server and * used to authenticate applications on the instance. See Service Accounts for more information. * @return value or {@code null} for none */ public java.util.List<ServiceAccount> getServiceAccounts() { return serviceAccounts; } /** * A list of service accounts, with their specified scopes, authorized for this instance. Only one * service account per VM instance is supported. * * Service accounts generate access tokens that can be accessed through the metadata server and * used to authenticate applications on the instance. See Service Accounts for more information. 
* @param serviceAccounts serviceAccounts or {@code null} for none */ public Instance setServiceAccounts(java.util.List<ServiceAccount> serviceAccounts) { this.serviceAccounts = serviceAccounts; return this; } /** * @return value or {@code null} for none */ public ShieldedInstanceConfig getShieldedInstanceConfig() { return shieldedInstanceConfig; } /** * @param shieldedInstanceConfig shieldedInstanceConfig or {@code null} for none */ public Instance setShieldedInstanceConfig(ShieldedInstanceConfig shieldedInstanceConfig) { this.shieldedInstanceConfig = shieldedInstanceConfig; return this; } /** * @return value or {@code null} for none */ public ShieldedInstanceIntegrityPolicy getShieldedInstanceIntegrityPolicy() { return shieldedInstanceIntegrityPolicy; } /** * @param shieldedInstanceIntegrityPolicy shieldedInstanceIntegrityPolicy or {@code null} for none */ public Instance setShieldedInstanceIntegrityPolicy(ShieldedInstanceIntegrityPolicy shieldedInstanceIntegrityPolicy) { this.shieldedInstanceIntegrityPolicy = shieldedInstanceIntegrityPolicy; return this; } /** * @return value or {@code null} for none */ public ShieldedVmConfig getShieldedVmConfig() { return shieldedVmConfig; } /** * @param shieldedVmConfig shieldedVmConfig or {@code null} for none */ public Instance setShieldedVmConfig(ShieldedVmConfig shieldedVmConfig) { this.shieldedVmConfig = shieldedVmConfig; return this; } /** * @return value or {@code null} for none */ public ShieldedVmIntegrityPolicy getShieldedVmIntegrityPolicy() { return shieldedVmIntegrityPolicy; } /** * @param shieldedVmIntegrityPolicy shieldedVmIntegrityPolicy or {@code null} for none */ public Instance setShieldedVmIntegrityPolicy(ShieldedVmIntegrityPolicy shieldedVmIntegrityPolicy) { this.shieldedVmIntegrityPolicy = shieldedVmIntegrityPolicy; return this; } /** * Source machine image * @return value or {@code null} for none */ public java.lang.String getSourceMachineImage() { return sourceMachineImage; } /** * Source machine image * 
@param sourceMachineImage sourceMachineImage or {@code null} for none */ public Instance setSourceMachineImage(java.lang.String sourceMachineImage) { this.sourceMachineImage = sourceMachineImage; return this; } /** * Source GMI encryption key when creating an instance from GMI. * @return value or {@code null} for none */ public CustomerEncryptionKey getSourceMachineImageEncryptionKey() { return sourceMachineImageEncryptionKey; } /** * Source GMI encryption key when creating an instance from GMI. * @param sourceMachineImageEncryptionKey sourceMachineImageEncryptionKey or {@code null} for none */ public Instance setSourceMachineImageEncryptionKey(CustomerEncryptionKey sourceMachineImageEncryptionKey) { this.sourceMachineImageEncryptionKey = sourceMachineImageEncryptionKey; return this; } /** * [Output Only] Whether a VM has been restricted for start because Compute Engine has detected * suspicious activity. * @return value or {@code null} for none */ public java.lang.Boolean getStartRestricted() { return startRestricted; } /** * [Output Only] Whether a VM has been restricted for start because Compute Engine has detected * suspicious activity. * @param startRestricted startRestricted or {@code null} for none */ public Instance setStartRestricted(java.lang.Boolean startRestricted) { this.startRestricted = startRestricted; return this; } /** * [Output Only] The status of the instance. One of the following values: PROVISIONING, STAGING, * RUNNING, STOPPING, STOPPED, SUSPENDING, SUSPENDED, and TERMINATED. * @return value or {@code null} for none */ public java.lang.String getStatus() { return status; } /** * [Output Only] The status of the instance. One of the following values: PROVISIONING, STAGING, * RUNNING, STOPPING, STOPPED, SUSPENDING, SUSPENDED, and TERMINATED. 
* @param status status or {@code null} for none */ public Instance setStatus(java.lang.String status) { this.status = status; return this; } /** * [Output Only] An optional, human-readable explanation of the status. * @return value or {@code null} for none */ public java.lang.String getStatusMessage() { return statusMessage; } /** * [Output Only] An optional, human-readable explanation of the status. * @param statusMessage statusMessage or {@code null} for none */ public Instance setStatusMessage(java.lang.String statusMessage) { this.statusMessage = statusMessage; return this; } /** * Tags to apply to this instance. Tags are used to identify valid sources or targets for network * firewalls and are specified by the client during instance creation. The tags can be later * modified by the setTags method. Each tag within the list must comply with RFC1035. Multiple * tags can be specified via the 'tags.items' field. * @return value or {@code null} for none */ public Tags getTags() { return tags; } /** * Tags to apply to this instance. Tags are used to identify valid sources or targets for network * firewalls and are specified by the client during instance creation. The tags can be later * modified by the setTags method. Each tag within the list must comply with RFC1035. Multiple * tags can be specified via the 'tags.items' field. * @param tags tags or {@code null} for none */ public Instance setTags(Tags tags) { this.tags = tags; return this; } /** * [Output Only] URL of the zone where the instance resides. You must specify this field as part * of the HTTP request URL. It is not settable as a field in the request body. * @return value or {@code null} for none */ public java.lang.String getZone() { return zone; } /** * [Output Only] URL of the zone where the instance resides. You must specify this field as part * of the HTTP request URL. It is not settable as a field in the request body. 
* @param zone zone or {@code null} for none */ public Instance setZone(java.lang.String zone) { this.zone = zone; return this; } @Override public Instance set(String fieldName, Object value) { return (Instance) super.set(fieldName, value); } @Override public Instance clone() { return (Instance) super.clone(); } }
googleapis/google-api-java-client-services
36,069
clients/google-api-services-compute/beta/1.29.2/com/google/api/services/compute/model/Instance.java
/*
 * Licensed under the Apache License, Version 2.0 (the "License"); you may not use this file except
 * in compliance with the License. You may obtain a copy of the License at
 *
 * http://www.apache.org/licenses/LICENSE-2.0
 *
 * Unless required by applicable law or agreed to in writing, software distributed under the License
 * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express
 * or implied. See the License for the specific language governing permissions and limitations under
 * the License.
 */
/*
 * This code was generated by https://github.com/googleapis/google-api-java-client-services/
 * Modify at your own risk.
 */

package com.google.api.services.compute.model;

/**
 * Represents an Instance resource.
 *
 * An instance is a virtual machine that is hosted on Google Cloud Platform. For more information,
 * read Virtual Machine Instances. (== resource_for beta.instances ==) (== resource_for v1.instances
 * ==)
 *
 * <p> This is the Java data model class that specifies how to parse/serialize into the JSON that is
 * transmitted over HTTP when working with the Compute Engine API. For a detailed explanation see:
 * <a href="https://developers.google.com/api-client-library/java/google-http-java-client/json">https://developers.google.com/api-client-library/java/google-http-java-client/json</a>
 * </p>
 *
 * <p>NOTE: generated code — do not hand-edit; regenerate from the API discovery document instead.
 * Every field maps 1:1 to a JSON property via {@code @Key}; setters return {@code this} for
 * chaining.</p>
 *
 * @author Google, Inc.
 */
@SuppressWarnings("javadoc")
public final class Instance extends com.google.api.client.json.GenericJson {

  /**
   * Allows this instance to send and receive packets with non-matching destination or source IPs.
   * Required if this instance forwards routes (see Enabling IP Forwarding).
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean canIpForward;

  /**
   * [Output Only] The CPU platform used by this instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String cpuPlatform;

  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String creationTimestamp;

  /**
   * Whether the resource should be protected against deletion.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean deletionProtection;

  /**
   * An optional description of this resource. Provide this property when you create the resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String description;

  /**
   * Array of disks associated with this instance. Persistent disks must be created before you can
   * assign them.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<AttachedDisk> disks;

  static {
    // hack to force ProGuard to consider AttachedDisk used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(AttachedDisk.class);
  }

  /**
   * Enables display device for the instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private DisplayDevice displayDevice;

  /**
   * Specifies whether the disks restored from source snapshots or source machine image should erase
   * Windows specific VSS signature.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean eraseWindowsVssSignature;

  /**
   * A list of the type and count of accelerator cards attached to the instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<AcceleratorConfig> guestAccelerators;

  static {
    // hack to force ProGuard to consider AcceleratorConfig used, since otherwise it would be stripped out
    // see https://github.com/google/google-api-java-client/issues/543
    com.google.api.client.util.Data.nullOf(AcceleratorConfig.class);
  }

  /**
   * Specifies the hostname of the instance. The specified hostname must be RFC1035 compliant. If
   * hostname is not specified, the default hostname is [INSTANCE_NAME].c.[PROJECT_ID].internal when
   * using the global DNS, and [INSTANCE_NAME].[ZONE].c.[PROJECT_ID].internal when using zonal DNS.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String hostname;

  /**
   * [Output Only] The unique identifier for the resource. This identifier is defined by the server.
   * Serialized as a JSON string because it may exceed the safe integer range of JavaScript clients.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  @com.google.api.client.json.JsonString
  private java.math.BigInteger id;

  /**
   * [Output Only] Type of the resource. Always compute#instance for instances.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String kind;

  /**
   * A fingerprint for this request, which is essentially a hash of the label's contents and used
   * for optimistic locking. The fingerprint is initially generated by Compute Engine and changes
   * after every request to modify or update labels. You must always provide an up-to-date
   * fingerprint hash in order to update or change labels.
   *
   * To see the latest fingerprint, make get() request to the instance.
   * Stored Base64-encoded; see {@link #decodeLabelFingerprint()} / {@link #encodeLabelFingerprint(byte[])}.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String labelFingerprint;

  /**
   * Labels to apply to this instance. These can be later modified by the setLabels method.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.Map<String, java.lang.String> labels;

  /**
   * Full or partial URL of the machine type resource to use for this instance, in the format:
   * zones/zone/machineTypes/machine-type. This is provided by the client when the instance is
   * created. For example, the following is a valid partial url to a predefined machine type: zones
   * /us-central1-f/machineTypes/n1-standard-1
   *
   * To create a custom machine type, provide a URL to a machine type in the following format, where
   * CPUS is 1 or an even number up to 32 (2, 4, 6, ... 24, etc), and MEMORY is the total memory for
   * this instance. Memory must be a multiple of 256 MB and must be supplied in MB (e.g. 5 GB of
   * memory is 5120 MB): zones/zone/machineTypes/custom-CPUS-MEMORY
   *
   * For example: zones/us-central1-f/machineTypes/custom-4-5120
   *
   * For a full list of restrictions, read the Specifications for custom machine types.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String machineType;

  /**
   * The metadata key/value pairs assigned to this instance. This includes custom metadata and
   * predefined keys.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Metadata metadata;

  /**
   * Specifies a minimum CPU platform for the VM instance. Applicable values are the friendly names
   * of CPU platforms, such as minCpuPlatform: "Intel Haswell" or minCpuPlatform: "Intel Sandy
   * Bridge".
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String minCpuPlatform;

  /**
   * The name of the resource, provided by the client when initially creating the resource. The
   * resource name must be 1-63 characters long, and comply with RFC1035. Specifically, the name
   * must be 1-63 characters long and match the regular expression `[a-z]([-a-z0-9]*[a-z0-9])?`
   * which means the first character must be a lowercase letter, and all following characters must
   * be a dash, lowercase letter, or digit, except the last character, which cannot be a dash.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String name;

  /**
   * An array of network configurations for this instance. These specify how interfaces are
   * configured to interact with other network services, such as connecting to the internet.
   * Multiple interfaces are supported per instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<NetworkInterface> networkInterfaces;

  /**
   * Specifies the reservations that this instance can consume from.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ReservationAffinity reservationAffinity;

  /**
   * Sets the scheduling options for this instance.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Scheduling scheduling;

  /**
   * [Output Only] Server-defined URL for this resource.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String selfLink;

  /**
   * A list of service accounts, with their specified scopes, authorized for this instance. Only one
   * service account per VM instance is supported.
   *
   * Service accounts generate access tokens that can be accessed through the metadata server and
   * used to authenticate applications on the instance. See Service Accounts for more information.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.util.List<ServiceAccount> serviceAccounts;

  /**
   * Shielded-instance configuration for this VM.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ShieldedInstanceConfig shieldedInstanceConfig;

  /**
   * Shielded-instance integrity policy for this VM.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ShieldedInstanceIntegrityPolicy shieldedInstanceIntegrityPolicy;

  /**
   * Legacy (pre-rename) shielded-VM configuration; superseded by shieldedInstanceConfig.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ShieldedVmConfig shieldedVmConfig;

  /**
   * Legacy (pre-rename) shielded-VM integrity policy; superseded by shieldedInstanceIntegrityPolicy.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private ShieldedVmIntegrityPolicy shieldedVmIntegrityPolicy;

  /**
   * Source machine image
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String sourceMachineImage;

  /**
   * Source GMI encryption key when creating an instance from GMI.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private CustomerEncryptionKey sourceMachineImageEncryptionKey;

  /**
   * [Output Only] Whether a VM has been restricted for start because Compute Engine has detected
   * suspicious activity.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.Boolean startRestricted;

  /**
   * [Output Only] The status of the instance. One of the following values: PROVISIONING, STAGING,
   * RUNNING, STOPPING, STOPPED, SUSPENDING, SUSPENDED, and TERMINATED.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String status;

  /**
   * [Output Only] An optional, human-readable explanation of the status.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String statusMessage;

  /**
   * Tags to apply to this instance. Tags are used to identify valid sources or targets for network
   * firewalls and are specified by the client during instance creation. The tags can be later
   * modified by the setTags method. Each tag within the list must comply with RFC1035. Multiple
   * tags can be specified via the 'tags.items' field.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private Tags tags;

  /**
   * [Output Only] URL of the zone where the instance resides. You must specify this field as part
   * of the HTTP request URL. It is not settable as a field in the request body.
   * The value may be {@code null}.
   */
  @com.google.api.client.util.Key
  private java.lang.String zone;

  /**
   * Allows this instance to send and receive packets with non-matching destination or source IPs.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getCanIpForward() {
    return canIpForward;
  }

  /**
   * Allows this instance to send and receive packets with non-matching destination or source IPs.
   * @param canIpForward canIpForward or {@code null} for none
   */
  public Instance setCanIpForward(java.lang.Boolean canIpForward) {
    this.canIpForward = canIpForward;
    return this;
  }

  /**
   * [Output Only] The CPU platform used by this instance.
   * @return value or {@code null} for none
   */
  public java.lang.String getCpuPlatform() {
    return cpuPlatform;
  }

  /**
   * [Output Only] The CPU platform used by this instance.
   * @param cpuPlatform cpuPlatform or {@code null} for none
   */
  public Instance setCpuPlatform(java.lang.String cpuPlatform) {
    this.cpuPlatform = cpuPlatform;
    return this;
  }

  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * @return value or {@code null} for none
   */
  public java.lang.String getCreationTimestamp() {
    return creationTimestamp;
  }

  /**
   * [Output Only] Creation timestamp in RFC3339 text format.
   * @param creationTimestamp creationTimestamp or {@code null} for none
   */
  public Instance setCreationTimestamp(java.lang.String creationTimestamp) {
    this.creationTimestamp = creationTimestamp;
    return this;
  }

  /**
   * Whether the resource should be protected against deletion.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getDeletionProtection() {
    return deletionProtection;
  }

  /**
   * Whether the resource should be protected against deletion.
   * @param deletionProtection deletionProtection or {@code null} for none
   */
  public Instance setDeletionProtection(java.lang.Boolean deletionProtection) {
    this.deletionProtection = deletionProtection;
    return this;
  }

  /**
   * An optional description of this resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getDescription() {
    return description;
  }

  /**
   * An optional description of this resource.
   * @param description description or {@code null} for none
   */
  public Instance setDescription(java.lang.String description) {
    this.description = description;
    return this;
  }

  /**
   * Array of disks associated with this instance.
   * @return value or {@code null} for none
   */
  public java.util.List<AttachedDisk> getDisks() {
    return disks;
  }

  /**
   * Array of disks associated with this instance.
   * @param disks disks or {@code null} for none
   */
  public Instance setDisks(java.util.List<AttachedDisk> disks) {
    this.disks = disks;
    return this;
  }

  /**
   * Enables display device for the instance.
   * @return value or {@code null} for none
   */
  public DisplayDevice getDisplayDevice() {
    return displayDevice;
  }

  /**
   * Enables display device for the instance.
   * @param displayDevice displayDevice or {@code null} for none
   */
  public Instance setDisplayDevice(DisplayDevice displayDevice) {
    this.displayDevice = displayDevice;
    return this;
  }

  /**
   * Specifies whether the disks restored from source snapshots or source machine image should erase
   * Windows specific VSS signature.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getEraseWindowsVssSignature() {
    return eraseWindowsVssSignature;
  }

  /**
   * Specifies whether the disks restored from source snapshots or source machine image should erase
   * Windows specific VSS signature.
   * @param eraseWindowsVssSignature eraseWindowsVssSignature or {@code null} for none
   */
  public Instance setEraseWindowsVssSignature(java.lang.Boolean eraseWindowsVssSignature) {
    this.eraseWindowsVssSignature = eraseWindowsVssSignature;
    return this;
  }

  /**
   * A list of the type and count of accelerator cards attached to the instance.
   * @return value or {@code null} for none
   */
  public java.util.List<AcceleratorConfig> getGuestAccelerators() {
    return guestAccelerators;
  }

  /**
   * A list of the type and count of accelerator cards attached to the instance.
   * @param guestAccelerators guestAccelerators or {@code null} for none
   */
  public Instance setGuestAccelerators(java.util.List<AcceleratorConfig> guestAccelerators) {
    this.guestAccelerators = guestAccelerators;
    return this;
  }

  /**
   * Specifies the hostname of the instance (RFC1035 compliant).
   * @return value or {@code null} for none
   */
  public java.lang.String getHostname() {
    return hostname;
  }

  /**
   * Specifies the hostname of the instance (RFC1035 compliant).
   * @param hostname hostname or {@code null} for none
   */
  public Instance setHostname(java.lang.String hostname) {
    this.hostname = hostname;
    return this;
  }

  /**
   * [Output Only] The unique identifier for the resource, defined by the server.
   * @return value or {@code null} for none
   */
  public java.math.BigInteger getId() {
    return id;
  }

  /**
   * [Output Only] The unique identifier for the resource, defined by the server.
   * @param id id or {@code null} for none
   */
  public Instance setId(java.math.BigInteger id) {
    this.id = id;
    return this;
  }

  /**
   * [Output Only] Type of the resource. Always compute#instance for instances.
   * @return value or {@code null} for none
   */
  public java.lang.String getKind() {
    return kind;
  }

  /**
   * [Output Only] Type of the resource. Always compute#instance for instances.
   * @param kind kind or {@code null} for none
   */
  public Instance setKind(java.lang.String kind) {
    this.kind = kind;
    return this;
  }

  /**
   * Optimistic-locking fingerprint for label updates (Base64-encoded).
   * @see #decodeLabelFingerprint()
   * @return value or {@code null} for none
   */
  public java.lang.String getLabelFingerprint() {
    return labelFingerprint;
  }

  /**
   * Optimistic-locking fingerprint for label updates.
   * @see #getLabelFingerprint()
   * @return Base64 decoded value or {@code null} for none
   *
   * @since 1.14
   */
  public byte[] decodeLabelFingerprint() {
    return com.google.api.client.util.Base64.decodeBase64(labelFingerprint);
  }

  /**
   * Optimistic-locking fingerprint for label updates (Base64-encoded).
   * @see #encodeLabelFingerprint()
   * @param labelFingerprint labelFingerprint or {@code null} for none
   */
  public Instance setLabelFingerprint(java.lang.String labelFingerprint) {
    this.labelFingerprint = labelFingerprint;
    return this;
  }

  /**
   * Optimistic-locking fingerprint for label updates.
   * @see #setLabelFingerprint()
   *
   * <p>
   * The value is encoded Base64 or {@code null} for none.
   * </p>
   *
   * @since 1.14
   */
  public Instance encodeLabelFingerprint(byte[] labelFingerprint) {
    this.labelFingerprint = com.google.api.client.util.Base64.encodeBase64URLSafeString(labelFingerprint);
    return this;
  }

  /**
   * Labels to apply to this instance. These can be later modified by the setLabels method.
   * @return value or {@code null} for none
   */
  public java.util.Map<String, java.lang.String> getLabels() {
    return labels;
  }

  /**
   * Labels to apply to this instance. These can be later modified by the setLabels method.
   * @param labels labels or {@code null} for none
   */
  public Instance setLabels(java.util.Map<String, java.lang.String> labels) {
    this.labels = labels;
    return this;
  }

  /**
   * Full or partial URL of the machine type resource to use for this instance; see the field
   * documentation for the predefined and custom machine-type URL formats.
   * @return value or {@code null} for none
   */
  public java.lang.String getMachineType() {
    return machineType;
  }

  /**
   * Full or partial URL of the machine type resource to use for this instance; see the field
   * documentation for the predefined and custom machine-type URL formats.
   * @param machineType machineType or {@code null} for none
   */
  public Instance setMachineType(java.lang.String machineType) {
    this.machineType = machineType;
    return this;
  }

  /**
   * The metadata key/value pairs assigned to this instance.
   * @return value or {@code null} for none
   */
  public Metadata getMetadata() {
    return metadata;
  }

  /**
   * The metadata key/value pairs assigned to this instance.
   * @param metadata metadata or {@code null} for none
   */
  public Instance setMetadata(Metadata metadata) {
    this.metadata = metadata;
    return this;
  }

  /**
   * Specifies a minimum CPU platform for the VM instance (friendly name, e.g. "Intel Haswell").
   * @return value or {@code null} for none
   */
  public java.lang.String getMinCpuPlatform() {
    return minCpuPlatform;
  }

  /**
   * Specifies a minimum CPU platform for the VM instance (friendly name, e.g. "Intel Haswell").
   * @param minCpuPlatform minCpuPlatform or {@code null} for none
   */
  public Instance setMinCpuPlatform(java.lang.String minCpuPlatform) {
    this.minCpuPlatform = minCpuPlatform;
    return this;
  }

  /**
   * The name of the resource, provided by the client when initially creating the resource
   * (1-63 characters, RFC1035 compliant).
   * @return value or {@code null} for none
   */
  public java.lang.String getName() {
    return name;
  }

  /**
   * The name of the resource, provided by the client when initially creating the resource
   * (1-63 characters, RFC1035 compliant).
   * @param name name or {@code null} for none
   */
  public Instance setName(java.lang.String name) {
    this.name = name;
    return this;
  }

  /**
   * An array of network configurations for this instance.
   * @return value or {@code null} for none
   */
  public java.util.List<NetworkInterface> getNetworkInterfaces() {
    return networkInterfaces;
  }

  /**
   * An array of network configurations for this instance.
   * @param networkInterfaces networkInterfaces or {@code null} for none
   */
  public Instance setNetworkInterfaces(java.util.List<NetworkInterface> networkInterfaces) {
    this.networkInterfaces = networkInterfaces;
    return this;
  }

  /**
   * Specifies the reservations that this instance can consume from.
   * @return value or {@code null} for none
   */
  public ReservationAffinity getReservationAffinity() {
    return reservationAffinity;
  }

  /**
   * Specifies the reservations that this instance can consume from.
   * @param reservationAffinity reservationAffinity or {@code null} for none
   */
  public Instance setReservationAffinity(ReservationAffinity reservationAffinity) {
    this.reservationAffinity = reservationAffinity;
    return this;
  }

  /**
   * Sets the scheduling options for this instance.
   * @return value or {@code null} for none
   */
  public Scheduling getScheduling() {
    return scheduling;
  }

  /**
   * Sets the scheduling options for this instance.
   * @param scheduling scheduling or {@code null} for none
   */
  public Instance setScheduling(Scheduling scheduling) {
    this.scheduling = scheduling;
    return this;
  }

  /**
   * [Output Only] Server-defined URL for this resource.
   * @return value or {@code null} for none
   */
  public java.lang.String getSelfLink() {
    return selfLink;
  }

  /**
   * [Output Only] Server-defined URL for this resource.
   * @param selfLink selfLink or {@code null} for none
   */
  public Instance setSelfLink(java.lang.String selfLink) {
    this.selfLink = selfLink;
    return this;
  }

  /**
   * A list of service accounts, with their specified scopes, authorized for this instance. Only one
   * service account per VM instance is supported.
   * @return value or {@code null} for none
   */
  public java.util.List<ServiceAccount> getServiceAccounts() {
    return serviceAccounts;
  }

  /**
   * A list of service accounts, with their specified scopes, authorized for this instance. Only one
   * service account per VM instance is supported.
   * @param serviceAccounts serviceAccounts or {@code null} for none
   */
  public Instance setServiceAccounts(java.util.List<ServiceAccount> serviceAccounts) {
    this.serviceAccounts = serviceAccounts;
    return this;
  }

  /**
   * Shielded-instance configuration for this VM.
   * @return value or {@code null} for none
   */
  public ShieldedInstanceConfig getShieldedInstanceConfig() {
    return shieldedInstanceConfig;
  }

  /**
   * Shielded-instance configuration for this VM.
   * @param shieldedInstanceConfig shieldedInstanceConfig or {@code null} for none
   */
  public Instance setShieldedInstanceConfig(ShieldedInstanceConfig shieldedInstanceConfig) {
    this.shieldedInstanceConfig = shieldedInstanceConfig;
    return this;
  }

  /**
   * Shielded-instance integrity policy for this VM.
   * @return value or {@code null} for none
   */
  public ShieldedInstanceIntegrityPolicy getShieldedInstanceIntegrityPolicy() {
    return shieldedInstanceIntegrityPolicy;
  }

  /**
   * Shielded-instance integrity policy for this VM.
   * @param shieldedInstanceIntegrityPolicy shieldedInstanceIntegrityPolicy or {@code null} for none
   */
  public Instance setShieldedInstanceIntegrityPolicy(ShieldedInstanceIntegrityPolicy shieldedInstanceIntegrityPolicy) {
    this.shieldedInstanceIntegrityPolicy = shieldedInstanceIntegrityPolicy;
    return this;
  }

  /**
   * Legacy shielded-VM configuration; superseded by {@link #getShieldedInstanceConfig()}.
   * @return value or {@code null} for none
   */
  public ShieldedVmConfig getShieldedVmConfig() {
    return shieldedVmConfig;
  }

  /**
   * Legacy shielded-VM configuration; superseded by {@link #setShieldedInstanceConfig(ShieldedInstanceConfig)}.
   * @param shieldedVmConfig shieldedVmConfig or {@code null} for none
   */
  public Instance setShieldedVmConfig(ShieldedVmConfig shieldedVmConfig) {
    this.shieldedVmConfig = shieldedVmConfig;
    return this;
  }

  /**
   * Legacy shielded-VM integrity policy; superseded by {@link #getShieldedInstanceIntegrityPolicy()}.
   * @return value or {@code null} for none
   */
  public ShieldedVmIntegrityPolicy getShieldedVmIntegrityPolicy() {
    return shieldedVmIntegrityPolicy;
  }

  /**
   * Legacy shielded-VM integrity policy; superseded by
   * {@link #setShieldedInstanceIntegrityPolicy(ShieldedInstanceIntegrityPolicy)}.
   * @param shieldedVmIntegrityPolicy shieldedVmIntegrityPolicy or {@code null} for none
   */
  public Instance setShieldedVmIntegrityPolicy(ShieldedVmIntegrityPolicy shieldedVmIntegrityPolicy) {
    this.shieldedVmIntegrityPolicy = shieldedVmIntegrityPolicy;
    return this;
  }

  /**
   * Source machine image
   * @return value or {@code null} for none
   */
  public java.lang.String getSourceMachineImage() {
    return sourceMachineImage;
  }

  /**
   * Source machine image
   * @param sourceMachineImage sourceMachineImage or {@code null} for none
   */
  public Instance setSourceMachineImage(java.lang.String sourceMachineImage) {
    this.sourceMachineImage = sourceMachineImage;
    return this;
  }

  /**
   * Source GMI encryption key when creating an instance from GMI.
   * @return value or {@code null} for none
   */
  public CustomerEncryptionKey getSourceMachineImageEncryptionKey() {
    return sourceMachineImageEncryptionKey;
  }

  /**
   * Source GMI encryption key when creating an instance from GMI.
   * @param sourceMachineImageEncryptionKey sourceMachineImageEncryptionKey or {@code null} for none
   */
  public Instance setSourceMachineImageEncryptionKey(CustomerEncryptionKey sourceMachineImageEncryptionKey) {
    this.sourceMachineImageEncryptionKey = sourceMachineImageEncryptionKey;
    return this;
  }

  /**
   * [Output Only] Whether a VM has been restricted for start because Compute Engine has detected
   * suspicious activity.
   * @return value or {@code null} for none
   */
  public java.lang.Boolean getStartRestricted() {
    return startRestricted;
  }

  /**
   * [Output Only] Whether a VM has been restricted for start because Compute Engine has detected
   * suspicious activity.
   * @param startRestricted startRestricted or {@code null} for none
   */
  public Instance setStartRestricted(java.lang.Boolean startRestricted) {
    this.startRestricted = startRestricted;
    return this;
  }

  /**
   * [Output Only] The status of the instance. One of the following values: PROVISIONING, STAGING,
   * RUNNING, STOPPING, STOPPED, SUSPENDING, SUSPENDED, and TERMINATED.
   * @return value or {@code null} for none
   */
  public java.lang.String getStatus() {
    return status;
  }

  /**
   * [Output Only] The status of the instance. One of the following values: PROVISIONING, STAGING,
   * RUNNING, STOPPING, STOPPED, SUSPENDING, SUSPENDED, and TERMINATED.
   * @param status status or {@code null} for none
   */
  public Instance setStatus(java.lang.String status) {
    this.status = status;
    return this;
  }

  /**
   * [Output Only] An optional, human-readable explanation of the status.
   * @return value or {@code null} for none
   */
  public java.lang.String getStatusMessage() {
    return statusMessage;
  }

  /**
   * [Output Only] An optional, human-readable explanation of the status.
   * @param statusMessage statusMessage or {@code null} for none
   */
  public Instance setStatusMessage(java.lang.String statusMessage) {
    this.statusMessage = statusMessage;
    return this;
  }

  /**
   * Tags to apply to this instance, used to identify valid sources or targets for network
   * firewalls. Each tag must comply with RFC1035.
   * @return value or {@code null} for none
   */
  public Tags getTags() {
    return tags;
  }

  /**
   * Tags to apply to this instance, used to identify valid sources or targets for network
   * firewalls. Each tag must comply with RFC1035.
   * @param tags tags or {@code null} for none
   */
  public Instance setTags(Tags tags) {
    this.tags = tags;
    return this;
  }

  /**
   * [Output Only] URL of the zone where the instance resides. You must specify this field as part
   * of the HTTP request URL. It is not settable as a field in the request body.
   * @return value or {@code null} for none
   */
  public java.lang.String getZone() {
    return zone;
  }

  /**
   * [Output Only] URL of the zone where the instance resides. You must specify this field as part
   * of the HTTP request URL. It is not settable as a field in the request body.
   * @param zone zone or {@code null} for none
   */
  public Instance setZone(java.lang.String zone) {
    this.zone = zone;
    return this;
  }

  @Override
  public Instance set(String fieldName, Object value) {
    return (Instance) super.set(fieldName, value);
  }

  @Override
  public Instance clone() {
    return (Instance) super.clone();
  }

}
googleapis/google-cloud-java
35,959
java-bigquery-data-exchange/proto-google-cloud-bigquery-data-exchange-v1beta1/src/main/java/com/google/cloud/bigquery/dataexchange/v1beta1/SubscribeListingRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/bigquery/dataexchange/v1beta1/dataexchange.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.bigquery.dataexchange.v1beta1; /** * * * <pre> * Message for subscribing to a listing. * </pre> * * Protobuf type {@code google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest} */ public final class SubscribeListingRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest) SubscribeListingRequestOrBuilder { private static final long serialVersionUID = 0L; // Use SubscribeListingRequest.newBuilder() to construct. 
private SubscribeListingRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private SubscribeListingRequest() { name_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new SubscribeListingRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto .internal_static_google_cloud_bigquery_dataexchange_v1beta1_SubscribeListingRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto .internal_static_google_cloud_bigquery_dataexchange_v1beta1_SubscribeListingRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest.class, com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest.Builder.class); } private int destinationCase_ = 0; @SuppressWarnings("serial") private java.lang.Object destination_; public enum DestinationCase implements com.google.protobuf.Internal.EnumLite, com.google.protobuf.AbstractMessage.InternalOneOfEnum { DESTINATION_DATASET(3), DESTINATION_NOT_SET(0); private final int value; private DestinationCase(int value) { this.value = value; } /** * @param value The number of the enum to look for. * @return The enum associated with the given number. * @deprecated Use {@link #forNumber(int)} instead. 
*/ @java.lang.Deprecated public static DestinationCase valueOf(int value) { return forNumber(value); } public static DestinationCase forNumber(int value) { switch (value) { case 3: return DESTINATION_DATASET; case 0: return DESTINATION_NOT_SET; default: return null; } } public int getNumber() { return this.value; } }; public DestinationCase getDestinationCase() { return DestinationCase.forNumber(destinationCase_); } public static final int DESTINATION_DATASET_FIELD_NUMBER = 3; /** * * * <pre> * BigQuery destination dataset to create for the subscriber. * </pre> * * <code>.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset destination_dataset = 3; * </code> * * @return Whether the destinationDataset field is set. */ @java.lang.Override public boolean hasDestinationDataset() { return destinationCase_ == 3; } /** * * * <pre> * BigQuery destination dataset to create for the subscriber. * </pre> * * <code>.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset destination_dataset = 3; * </code> * * @return The destinationDataset. */ @java.lang.Override public com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset getDestinationDataset() { if (destinationCase_ == 3) { return (com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset) destination_; } return com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset.getDefaultInstance(); } /** * * * <pre> * BigQuery destination dataset to create for the subscriber. 
* </pre> * * <code>.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset destination_dataset = 3; * </code> */ @java.lang.Override public com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDatasetOrBuilder getDestinationDatasetOrBuilder() { if (destinationCase_ == 3) { return (com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset) destination_; } return com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset.getDefaultInstance(); } public static final int NAME_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object name_ = ""; /** * * * <pre> * Required. Resource name of the listing that you want to subscribe to. * e.g. `projects/myproject/locations/US/dataExchanges/123/listings/456`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ @java.lang.Override public java.lang.String getName() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } } /** * * * <pre> * Required. Resource name of the listing that you want to subscribe to. * e.g. `projects/myproject/locations/US/dataExchanges/123/listings/456`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. 
*/ @java.lang.Override public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, name_); } if (destinationCase_ == 3) { output.writeMessage( 3, (com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset) destination_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(name_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, name_); } if (destinationCase_ == 3) { size += com.google.protobuf.CodedOutputStream.computeMessageSize( 3, (com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset) destination_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest)) { return super.equals(obj); } com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest other = (com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest) obj; if (!getName().equals(other.getName())) return false; if 
(!getDestinationCase().equals(other.getDestinationCase())) return false; switch (destinationCase_) { case 3: if (!getDestinationDataset().equals(other.getDestinationDataset())) return false; break; case 0: default: } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + NAME_FIELD_NUMBER; hash = (53 * hash) + getName().hashCode(); switch (destinationCase_) { case 3: hash = (37 * hash) + DESTINATION_DATASET_FIELD_NUMBER; hash = (53 * hash) + getDestinationDataset().hashCode(); break; case 0: default: } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest parseFrom( byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest parseDelimitedFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { 
return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Message for subscribing to a listing. * </pre> * * Protobuf type {@code google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest) com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto .internal_static_google_cloud_bigquery_dataexchange_v1beta1_SubscribeListingRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto .internal_static_google_cloud_bigquery_dataexchange_v1beta1_SubscribeListingRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest.class, com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest.Builder.class); } // Construct using // 
com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; if (destinationDatasetBuilder_ != null) { destinationDatasetBuilder_.clear(); } name_ = ""; destinationCase_ = 0; destination_ = null; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.bigquery.dataexchange.v1beta1.DataExchangeProto .internal_static_google_cloud_bigquery_dataexchange_v1beta1_SubscribeListingRequest_descriptor; } @java.lang.Override public com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest getDefaultInstanceForType() { return com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest .getDefaultInstance(); } @java.lang.Override public com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest build() { com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest buildPartial() { com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest result = new com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest(this); if (bitField0_ != 0) { buildPartial0(result); } buildPartialOneofs(result); onBuilt(); return result; } private void buildPartial0( com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000002) != 0)) { result.name_ = name_; } } private void buildPartialOneofs( com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest result) { result.destinationCase_ = destinationCase_; result.destination_ = 
this.destination_; if (destinationCase_ == 3 && destinationDatasetBuilder_ != null) { result.destination_ = destinationDatasetBuilder_.build(); } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest) { return mergeFrom( (com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom( com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest other) { if (other == com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest .getDefaultInstance()) return this; if (!other.getName().isEmpty()) { name_ = other.name_; bitField0_ |= 0x00000002; onChanged(); } switch (other.getDestinationCase()) { case DESTINATION_DATASET: { mergeDestinationDataset(other.getDestinationDataset()); break; } case DESTINATION_NOT_SET: { break; } } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return 
true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { name_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 10 case 26: { input.readMessage( getDestinationDatasetFieldBuilder().getBuilder(), extensionRegistry); destinationCase_ = 3; break; } // case 26 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int destinationCase_ = 0; private java.lang.Object destination_; public DestinationCase getDestinationCase() { return DestinationCase.forNumber(destinationCase_); } public Builder clearDestination() { destinationCase_ = 0; destination_ = null; onChanged(); return this; } private int bitField0_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset, com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset.Builder, com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDatasetOrBuilder> destinationDatasetBuilder_; /** * * * <pre> * BigQuery destination dataset to create for the subscriber. * </pre> * * <code>.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset destination_dataset = 3; * </code> * * @return Whether the destinationDataset field is set. */ @java.lang.Override public boolean hasDestinationDataset() { return destinationCase_ == 3; } /** * * * <pre> * BigQuery destination dataset to create for the subscriber. 
* </pre> * * <code>.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset destination_dataset = 3; * </code> * * @return The destinationDataset. */ @java.lang.Override public com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset getDestinationDataset() { if (destinationDatasetBuilder_ == null) { if (destinationCase_ == 3) { return (com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset) destination_; } return com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset .getDefaultInstance(); } else { if (destinationCase_ == 3) { return destinationDatasetBuilder_.getMessage(); } return com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset .getDefaultInstance(); } } /** * * * <pre> * BigQuery destination dataset to create for the subscriber. * </pre> * * <code>.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset destination_dataset = 3; * </code> */ public Builder setDestinationDataset( com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset value) { if (destinationDatasetBuilder_ == null) { if (value == null) { throw new NullPointerException(); } destination_ = value; onChanged(); } else { destinationDatasetBuilder_.setMessage(value); } destinationCase_ = 3; return this; } /** * * * <pre> * BigQuery destination dataset to create for the subscriber. * </pre> * * <code>.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset destination_dataset = 3; * </code> */ public Builder setDestinationDataset( com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset.Builder builderForValue) { if (destinationDatasetBuilder_ == null) { destination_ = builderForValue.build(); onChanged(); } else { destinationDatasetBuilder_.setMessage(builderForValue.build()); } destinationCase_ = 3; return this; } /** * * * <pre> * BigQuery destination dataset to create for the subscriber. 
* </pre> * * <code>.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset destination_dataset = 3; * </code> */ public Builder mergeDestinationDataset( com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset value) { if (destinationDatasetBuilder_ == null) { if (destinationCase_ == 3 && destination_ != com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset .getDefaultInstance()) { destination_ = com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset.newBuilder( (com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset) destination_) .mergeFrom(value) .buildPartial(); } else { destination_ = value; } onChanged(); } else { if (destinationCase_ == 3) { destinationDatasetBuilder_.mergeFrom(value); } else { destinationDatasetBuilder_.setMessage(value); } } destinationCase_ = 3; return this; } /** * * * <pre> * BigQuery destination dataset to create for the subscriber. * </pre> * * <code>.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset destination_dataset = 3; * </code> */ public Builder clearDestinationDataset() { if (destinationDatasetBuilder_ == null) { if (destinationCase_ == 3) { destinationCase_ = 0; destination_ = null; onChanged(); } } else { if (destinationCase_ == 3) { destinationCase_ = 0; destination_ = null; } destinationDatasetBuilder_.clear(); } return this; } /** * * * <pre> * BigQuery destination dataset to create for the subscriber. * </pre> * * <code>.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset destination_dataset = 3; * </code> */ public com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset.Builder getDestinationDatasetBuilder() { return getDestinationDatasetFieldBuilder().getBuilder(); } /** * * * <pre> * BigQuery destination dataset to create for the subscriber. 
* </pre> * * <code>.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset destination_dataset = 3; * </code> */ @java.lang.Override public com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDatasetOrBuilder getDestinationDatasetOrBuilder() { if ((destinationCase_ == 3) && (destinationDatasetBuilder_ != null)) { return destinationDatasetBuilder_.getMessageOrBuilder(); } else { if (destinationCase_ == 3) { return (com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset) destination_; } return com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset .getDefaultInstance(); } } /** * * * <pre> * BigQuery destination dataset to create for the subscriber. * </pre> * * <code>.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset destination_dataset = 3; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset, com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset.Builder, com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDatasetOrBuilder> getDestinationDatasetFieldBuilder() { if (destinationDatasetBuilder_ == null) { if (!(destinationCase_ == 3)) { destination_ = com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset .getDefaultInstance(); } destinationDatasetBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset, com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset.Builder, com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDatasetOrBuilder>( (com.google.cloud.bigquery.dataexchange.v1beta1.DestinationDataset) destination_, getParentForChildren(), isClean()); destination_ = null; } destinationCase_ = 3; onChanged(); return destinationDatasetBuilder_; } private java.lang.Object name_ = ""; /** * * * <pre> * Required. Resource name of the listing that you want to subscribe to. * e.g. 
`projects/myproject/locations/US/dataExchanges/123/listings/456`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The name. */ public java.lang.String getName() { java.lang.Object ref = name_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); name_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Required. Resource name of the listing that you want to subscribe to. * e.g. `projects/myproject/locations/US/dataExchanges/123/listings/456`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @return The bytes for name. */ public com.google.protobuf.ByteString getNameBytes() { java.lang.Object ref = name_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); name_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Required. Resource name of the listing that you want to subscribe to. * e.g. `projects/myproject/locations/US/dataExchanges/123/listings/456`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The name to set. * @return This builder for chaining. */ public Builder setName(java.lang.String value) { if (value == null) { throw new NullPointerException(); } name_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Required. Resource name of the listing that you want to subscribe to. * e.g. `projects/myproject/locations/US/dataExchanges/123/listings/456`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... 
} * </code> * * @return This builder for chaining. */ public Builder clearName() { name_ = getDefaultInstance().getName(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Required. Resource name of the listing that you want to subscribe to. * e.g. `projects/myproject/locations/US/dataExchanges/123/listings/456`. * </pre> * * <code> * string name = 1 [(.google.api.field_behavior) = REQUIRED, (.google.api.resource_reference) = { ... } * </code> * * @param value The bytes for name to set. * @return This builder for chaining. */ public Builder setNameBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); name_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest) } // @@protoc_insertion_point(class_scope:google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest) private static final com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest(); } public static com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<SubscribeListingRequest> PARSER = new com.google.protobuf.AbstractParser<SubscribeListingRequest>() { @java.lang.Override public SubscribeListingRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, 
com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<SubscribeListingRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<SubscribeListingRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.bigquery.dataexchange.v1beta1.SubscribeListingRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/fluss
36,200
fluss-server/src/test/java/org/apache/fluss/server/replica/ReplicaTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more * contributor license agreements. See the NOTICE file distributed with * this work for additional information regarding copyright ownership. * The ASF licenses this file to You under the Apache License, Version 2.0 * (the "License"); you may not use this file except in compliance with * the License. You may obtain a copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package org.apache.fluss.server.replica; import org.apache.fluss.config.ConfigOptions; import org.apache.fluss.exception.OutOfOrderSequenceException; import org.apache.fluss.metadata.LogFormat; import org.apache.fluss.metadata.PhysicalTablePath; import org.apache.fluss.metadata.TableBucket; import org.apache.fluss.metadata.TablePath; import org.apache.fluss.record.ChangeType; import org.apache.fluss.record.KvRecordBatch; import org.apache.fluss.record.KvRecordTestUtils; import org.apache.fluss.record.LogRecordBatch; import org.apache.fluss.record.LogRecords; import org.apache.fluss.record.MemoryLogRecords; import org.apache.fluss.server.entity.NotifyLeaderAndIsrData; import org.apache.fluss.server.kv.KvTablet; import org.apache.fluss.server.kv.snapshot.CompletedSnapshot; import org.apache.fluss.server.kv.snapshot.TestingCompletedKvSnapshotCommitter; import org.apache.fluss.server.log.FetchParams; import org.apache.fluss.server.log.LogAppendInfo; import org.apache.fluss.server.log.LogReadInfo; import org.apache.fluss.server.testutils.KvTestUtils; import org.apache.fluss.server.zk.NOPErrorHandler; import org.apache.fluss.server.zk.data.LeaderAndIsr; import org.apache.fluss.testutils.DataTestUtils; 
import org.apache.fluss.testutils.common.ManuallyTriggeredScheduledExecutorService; import org.apache.fluss.utils.types.Tuple2; import org.junit.jupiter.api.Test; import org.junit.jupiter.api.io.TempDir; import java.io.File; import java.io.IOException; import java.nio.file.Path; import java.time.Duration; import java.util.ArrayList; import java.util.Arrays; import java.util.Collections; import java.util.Iterator; import java.util.List; import java.util.concurrent.ScheduledFuture; import java.util.concurrent.TimeUnit; import static org.apache.fluss.compression.ArrowCompressionInfo.DEFAULT_COMPRESSION; import static org.apache.fluss.record.LogRecordBatch.CURRENT_LOG_MAGIC_VALUE; import static org.apache.fluss.record.LogRecordBatchFormat.NO_BATCH_SEQUENCE; import static org.apache.fluss.record.LogRecordBatchFormat.NO_WRITER_ID; import static org.apache.fluss.record.TestData.DATA1; import static org.apache.fluss.record.TestData.DATA1_PHYSICAL_TABLE_PATH; import static org.apache.fluss.record.TestData.DATA1_PHYSICAL_TABLE_PATH_PK; import static org.apache.fluss.record.TestData.DATA1_ROW_TYPE; import static org.apache.fluss.record.TestData.DATA1_TABLE_ID; import static org.apache.fluss.record.TestData.DATA1_TABLE_ID_PK; import static org.apache.fluss.record.TestData.DATA1_TABLE_PATH; import static org.apache.fluss.record.TestData.DATA1_TABLE_PATH_PK; import static org.apache.fluss.record.TestData.DEFAULT_SCHEMA_ID; import static org.apache.fluss.server.coordinator.CoordinatorContext.INITIAL_COORDINATOR_EPOCH; import static org.apache.fluss.server.zk.data.LeaderAndIsr.INITIAL_LEADER_EPOCH; import static org.apache.fluss.testutils.DataTestUtils.assertLogRecordsEquals; import static org.apache.fluss.testutils.DataTestUtils.createBasicMemoryLogRecords; import static org.apache.fluss.testutils.DataTestUtils.genKvRecordBatch; import static org.apache.fluss.testutils.DataTestUtils.genKvRecords; import static org.apache.fluss.testutils.DataTestUtils.genMemoryLogRecordsByObject; 
import static org.apache.fluss.testutils.DataTestUtils.genMemoryLogRecordsWithWriterId;
import static org.apache.fluss.testutils.DataTestUtils.getKeyValuePairs;
import static org.apache.fluss.testutils.LogRecordsAssert.assertThatLogRecords;
import static org.apache.fluss.utils.Preconditions.checkNotNull;
import static org.assertj.core.api.Assertions.assertThat;
import static org.assertj.core.api.Assertions.assertThatThrownBy;

/**
 * Test for {@link Replica}.
 *
 * <p>Covers leader/follower transitions for both log tables and primary-key (kv) tables, appending
 * and putting records as leader, kv snapshotting (including incremental snapshots, leader-epoch
 * propagation and recovery from a broken snapshot), and restore from log/snapshot after a replica
 * is re-elected leader.
 */
final class ReplicaTest extends ReplicaTestBase {

    // TODO add more tests refer to kafka's PartitionTest.
    // TODO add more tests to cover partition table

    /**
     * Verifies that a log-table replica has no kv tablet before/after becoming leader, while a
     * kv-table replica gets its kv tablet created once it becomes leader.
     */
    @Test
    void testMakeLeader() throws Exception {
        Replica logReplica =
                makeLogReplica(DATA1_PHYSICAL_TABLE_PATH, new TableBucket(DATA1_TABLE_ID, 1));
        // log table.
        assertThat(logReplica.isKvTable()).isFalse();
        assertThat(logReplica.getLogTablet()).isNotNull();
        assertThat(logReplica.getKvTablet()).isNull();
        makeLogReplicaAsLeader(logReplica);
        // becoming leader must not create a kv tablet for a pure log table
        assertThat(logReplica.getLogTablet()).isNotNull();
        assertThat(logReplica.getKvTablet()).isNull();

        Replica kvReplica =
                makeKvReplica(DATA1_PHYSICAL_TABLE_PATH_PK, new TableBucket(DATA1_TABLE_ID_PK, 1));
        // Kv table.
        assertThat(kvReplica.isKvTable()).isTrue();
        assertThat(kvReplica.getLogTablet()).isNotNull();
        makeKvReplicaAsLeader(kvReplica);
        // the kv tablet only comes into existence after the replica becomes leader
        assertThat(kvReplica.getLogTablet()).isNotNull();
        assertThat(kvReplica.getKvTablet()).isNotNull();
    }

    /**
     * Appends one batch to a log-table leader and verifies the batch count reported by the append
     * plus a round-trip fetch of the appended records.
     */
    @Test
    void testAppendRecordsToLeader() throws Exception {
        Replica logReplica =
                makeLogReplica(DATA1_PHYSICAL_TABLE_PATH, new TableBucket(DATA1_TABLE_ID, 1));
        makeLogReplicaAsLeader(logReplica);
        MemoryLogRecords mr = genMemoryLogRecordsByObject(DATA1);
        LogAppendInfo appendInfo = logReplica.appendRecordsToLeader(mr, 0);
        // one generated batch was appended
        assertThat(appendInfo.shallowCount()).isEqualTo(1);

        FetchParams fetchParams =
                new FetchParams(
                        -1,
                        (int)
                                conf.get(ConfigOptions.CLIENT_SCANNER_LOG_FETCH_MAX_BYTES)
                                        .getBytes());
        fetchParams.setCurrentFetch(
                DATA1_TABLE_ID, 0, Integer.MAX_VALUE, DATA1_ROW_TYPE, DEFAULT_COMPRESSION, null);
        LogReadInfo logReadInfo = logReplica.fetchRecords(fetchParams);
        // fetched records must match what was appended
        assertLogRecordsEquals(DATA1_ROW_TYPE, logReadInfo.getFetchedData().getRecords(), DATA1);
    }

    /**
     * Verifies batch-sequence validation: after the writer state expires, an out-of-order batch
     * sequence is rejected on the leader path but accepted on the follower path.
     */
    @Test
    void testAppendRecordsWithOutOfOrderBatchSequence() throws Exception {
        Replica logReplica =
                makeLogReplica(DATA1_PHYSICAL_TABLE_PATH, new TableBucket(DATA1_TABLE_ID, 1));
        makeLogReplicaAsLeader(logReplica);
        long writerId = 101L;
        // 1. append a batch with batchSequence = 0
        logReplica.appendRecordsToLeader(genMemoryLogRecordsWithWriterId(DATA1, writerId, 0, 0), 0);

        // manual advance time and remove expired writer, the state of writer 101 will be removed
        manualClock.advanceTime(Duration.ofHours(12));
        manualClock.advanceTime(Duration.ofSeconds(1));
        assertThat(logReplica.getLogTablet().writerStateManager().activeWriters().size())
                .isEqualTo(1);
        logReplica.getLogTablet().removeExpiredWriter(manualClock.milliseconds());
        assertThat(logReplica.getLogTablet().writerStateManager().activeWriters().size())
                .isEqualTo(0);

        // 2. try to append an out of ordered batch as leader, will throw
        // OutOfOrderSequenceException
        assertThatThrownBy(
                        () ->
                                logReplica.appendRecordsToLeader(
                                        genMemoryLogRecordsWithWriterId(DATA1, writerId, 2, 10), 0))
                .isInstanceOf(OutOfOrderSequenceException.class);
        // the failed leader append must not advance the local log end offset
        assertThat(logReplica.getLocalLogEndOffset()).isEqualTo(10);

        // 3. try to append an out of ordered batch as follower
        logReplica.appendRecordsToFollower(genMemoryLogRecordsWithWriterId(DATA1, writerId, 2, 10));
        // follower path accepts the batch, advancing the log end offset
        assertThat(logReplica.getLocalLogEndOffset()).isEqualTo(20);
    }

    /**
     * Puts records with a target-column projection (partial update) to a kv leader and verifies the
     * change log (-U/+U pairs) produced for the updated rows.
     */
    @Test
    void testPartialPutRecordsToLeader() throws Exception {
        Replica kvReplica =
                makeKvReplica(DATA1_PHYSICAL_TABLE_PATH_PK, new TableBucket(DATA1_TABLE_ID_PK, 1));
        makeKvReplicaAsLeader(kvReplica);

        // two records in a batch with same key, should also generate +I/-U/+U
        KvRecordTestUtils.KvRecordFactory kvRecordFactory =
                KvRecordTestUtils.KvRecordFactory.of(DATA1_ROW_TYPE);
        KvRecordTestUtils.KvRecordBatchFactory kvRecordBatchFactory =
                KvRecordTestUtils.KvRecordBatchFactory.of(DEFAULT_SCHEMA_ID);
        KvRecordBatch kvRecords =
                kvRecordBatchFactory.ofRecords(
                        kvRecordFactory.ofRecord("k1", new Object[] {1, null}),
                        kvRecordFactory.ofRecord("k1", new Object[] {2, null}),
                        kvRecordFactory.ofRecord("k2", new Object[] {3, null}));
        // only column 0 is written by this partial update
        int[] targetColumns = new int[] {0};
        // put records
        putRecordsToLeader(kvReplica, kvRecords, targetColumns);

        targetColumns = new int[] {0, 1};
        kvRecords =
                kvRecordBatchFactory.ofRecords(
                        kvRecordFactory.ofRecord("k1", new Object[] {2, "aa"}),
                        kvRecordFactory.ofRecord("k2", new Object[] {3, "bb2"}),
                        kvRecordFactory.ofRecord("k2", new Object[] {3, "bb4"}));
        LogAppendInfo logAppendInfo = putRecordsToLeader(kvReplica, kvRecords, targetColumns);
        assertThat(logAppendInfo.lastOffset()).isEqualTo(9);
        MemoryLogRecords expected =
                logRecords(
                        4,
                        Arrays.asList(
                                ChangeType.UPDATE_BEFORE,
                                ChangeType.UPDATE_AFTER,
                                ChangeType.UPDATE_BEFORE,
                                ChangeType.UPDATE_AFTER,
                                ChangeType.UPDATE_BEFORE,
                                ChangeType.UPDATE_AFTER),
                        Arrays.asList(
                                // for k1
                                new Object[] {2, null},
                                new Object[] {2, "aa"},
                                // for k2
                                new Object[] {3, null},
                                new Object[] {3, "bb2"},
                                // for k2
                                new Object[] {3, "bb2"},
                                new Object[] {3, "bb4"}));
        assertThatLogRecords(fetchRecords(kvReplica, 4))
                .withSchema(DATA1_ROW_TYPE)
                .isEqualTo(expected);
    }

    /**
     * Exercises the full change-log semantics of putting records to a kv leader: insert,
     * update (-U/+U), delete, delete of a non-existent key (empty batch), and re-insert after
     * delete, checking the produced change log at each step.
     */
    @Test
    void testPutRecordsToLeader() throws Exception {
        Replica kvReplica =
                makeKvReplica(DATA1_PHYSICAL_TABLE_PATH_PK, new TableBucket(DATA1_TABLE_ID_PK, 1));
        makeKvReplicaAsLeader(kvReplica);

        // two records in a batch with same key, should also generate +I/-U/+U
        KvRecordTestUtils.KvRecordFactory kvRecordFactory =
                KvRecordTestUtils.KvRecordFactory.of(DATA1_ROW_TYPE);
        KvRecordTestUtils.KvRecordBatchFactory kvRecordBatchFactory =
                KvRecordTestUtils.KvRecordBatchFactory.of(DEFAULT_SCHEMA_ID);
        KvRecordBatch kvRecords =
                kvRecordBatchFactory.ofRecords(
                        kvRecordFactory.ofRecord("k1", new Object[] {1, "a"}),
                        kvRecordFactory.ofRecord("k1", new Object[] {2, "b"}),
                        kvRecordFactory.ofRecord("k2", new Object[] {3, "b1"}));
        LogAppendInfo logAppendInfo = putRecordsToLeader(kvReplica, kvRecords);
        assertThat(logAppendInfo.lastOffset()).isEqualTo(3);
        MemoryLogRecords expected =
                logRecords(
                        0L,
                        Arrays.asList(
                                ChangeType.INSERT,
                                ChangeType.UPDATE_BEFORE,
                                ChangeType.UPDATE_AFTER,
                                ChangeType.INSERT),
                        Arrays.asList(
                                new Object[] {1, "a"},
                                new Object[] {1, "a"},
                                new Object[] {2, "b"},
                                new Object[] {3, "b1"}));
        assertThatLogRecords(fetchRecords(kvReplica))
                .withSchema(DATA1_ROW_TYPE)
                .isEqualTo(expected);

        int currentOffset = 4;
        // now, append another batch, it should also produce
        // delete & update_before & update_after message
        kvRecords =
                kvRecordBatchFactory.ofRecords(
                        kvRecordFactory.ofRecord("k1", null),
                        kvRecordFactory.ofRecord("k2", new Object[] {4, "b2"}),
                        kvRecordFactory.ofRecord("k2", new Object[] {5, "b4"}));
        logAppendInfo = putRecordsToLeader(kvReplica, kvRecords);
        assertThat(logAppendInfo.lastOffset()).isEqualTo(8);
        expected =
                logRecords(
                        currentOffset,
                        Arrays.asList(
                                ChangeType.DELETE,
                                ChangeType.UPDATE_BEFORE,
                                ChangeType.UPDATE_AFTER,
                                ChangeType.UPDATE_BEFORE,
                                ChangeType.UPDATE_AFTER),
                        Arrays.asList(
                                // for k1
                                new Object[] {2, "b"},
                                // for k2
                                new Object[] {3, "b1"},
                                new Object[] {4, "b2"},
                                // for k2
                                new Object[] {4, "b2"},
                                new Object[] {5, "b4"}));
        assertThatLogRecords(fetchRecords(kvReplica, currentOffset))
                .withSchema(DATA1_ROW_TYPE)
                .isEqualTo(expected);

        currentOffset += 5;
        // put for k1, delete for k2, put for k3; it should produce
        // +I for k1 since k1 has been deleted, -D for k2; +I for k3
        kvRecords =
                kvRecordBatchFactory.ofRecords(
                        kvRecordFactory.ofRecord("k1", new Object[] {1, "a1"}),
                        kvRecordFactory.ofRecord("k2", null),
                        kvRecordFactory.ofRecord("k3", new Object[] {6, "b4"}));
        logAppendInfo = putRecordsToLeader(kvReplica, kvRecords);
        assertThat(logAppendInfo.lastOffset()).isEqualTo(11);
        expected =
                logRecords(
                        currentOffset,
                        Arrays.asList(ChangeType.INSERT, ChangeType.DELETE, ChangeType.INSERT),
                        Arrays.asList(
                                // for k1
                                new Object[] {1, "a1"},
                                // for k2
                                new Object[] {5, "b4"},
                                // for k3
                                new Object[] {6, "b4"}));
        assertThatLogRecords(fetchRecords(kvReplica, currentOffset))
                .withSchema(DATA1_ROW_TYPE)
                .isEqualTo(expected);

        currentOffset += 3;
        // delete k2 again, will produce a batch with empty record.
        kvRecords = kvRecordBatchFactory.ofRecords(kvRecordFactory.ofRecord("k2", null));
        logAppendInfo = putRecordsToLeader(kvReplica, kvRecords);
        assertThat(logAppendInfo.lastOffset()).isEqualTo(12);
        LogRecords logRecords = fetchRecords(kvReplica, currentOffset);
        Iterator<LogRecordBatch> iterator = logRecords.batches().iterator();
        assertThat(iterator.hasNext()).isTrue();
        LogRecordBatch batch = iterator.next();
        // deleting a non-existent key still appends a batch, but with zero records
        assertThat(batch.getRecordCount()).isEqualTo(0);

        currentOffset += 1;
        // delete k1 and put k1 again, should produce -D, +I
        kvRecords =
                kvRecordBatchFactory.ofRecords(
                        kvRecordFactory.ofRecord("k1", null),
                        kvRecordFactory.ofRecord("k1", new Object[] {1, "aaa"}));
        logAppendInfo = putRecordsToLeader(kvReplica, kvRecords);
        assertThat(logAppendInfo.lastOffset()).isEqualTo(14);
        expected =
                logRecords(
                        currentOffset,
                        Arrays.asList(ChangeType.DELETE, ChangeType.INSERT),
                        Arrays.asList(new Object[] {1, "a1"}, new Object[] {1, "aaa"}));
        assertThatLogRecords(fetchRecords(kvReplica, currentOffset))
                .withSchema(DATA1_ROW_TYPE)
                .isEqualTo(expected);
    }

    /**
     * Verifies kv snapshotting end to end: two successive snapshots (the second incremental over
     * the first, adding exactly one new file), then a leader re-election that restores from the
     * latest snapshot and takes a third snapshot.
     */
    @Test
    void testKvReplicaSnapshot(@TempDir File snapshotKvTabletDir) throws Exception {
        TableBucket tableBucket = new TableBucket(DATA1_TABLE_ID_PK, 1);
        // create test context
        TestSnapshotContext testKvSnapshotContext =
                new TestSnapshotContext(snapshotKvTabletDir.getPath());
        ManuallyTriggeredScheduledExecutorService scheduledExecutorService =
                testKvSnapshotContext.scheduledExecutorService;
        TestingCompletedKvSnapshotCommitter kvSnapshotStore =
                testKvSnapshotContext.testKvSnapshotStore;
        // make a kv replica
        Replica kvReplica =
                makeKvReplica(DATA1_PHYSICAL_TABLE_PATH_PK, tableBucket, testKvSnapshotContext);
        makeKvReplicaAsLeader(kvReplica);

        KvRecordBatch kvRecords =
                genKvRecordBatch(
                        Tuple2.of("k1", new Object[] {1, "a"}),
                        Tuple2.of("k1", new Object[] {2, "b"}),
                        Tuple2.of("k2", new Object[] {3, "b1"}));
        putRecordsToLeader(kvReplica, kvRecords);

        // trigger one snapshot,
        scheduledExecutorService.triggerNonPeriodicScheduledTask();
        // wait until the snapshot 0 success
        CompletedSnapshot completedSnapshot0 =
                kvSnapshotStore.waitUntilSnapshotComplete(tableBucket, 0);

        // check snapshot
        long expectedLogOffset = 4;
        List<Tuple2<byte[], byte[]>> expectedKeyValues =
                getKeyValuePairs(
                        genKvRecords(
                                Tuple2.of("k1", new Object[] {2, "b"}),
                                Tuple2.of("k2", new Object[] {3, "b1"})));
        KvTestUtils.checkSnapshot(completedSnapshot0, expectedKeyValues, expectedLogOffset);

        // put some data again
        kvRecords =
                genKvRecordBatch(Tuple2.of("k2", new Object[] {4, "bk2"}), Tuple2.of("k1", null));
        putRecordsToLeader(kvReplica, kvRecords);

        // trigger next checkpoint
        scheduledExecutorService.triggerNonPeriodicScheduledTask();
        // wait until the snapshot 1 success
        CompletedSnapshot completedSnapshot1 =
                kvSnapshotStore.waitUntilSnapshotComplete(tableBucket, 1);

        // check snapshot
        expectedLogOffset = 7;
        expectedKeyValues =
                getKeyValuePairs(genKvRecords(Tuple2.of("k2", new Object[] {4, "bk2"})));
        KvTestUtils.checkSnapshot(completedSnapshot1, expectedKeyValues, expectedLogOffset);

        // check the snapshot should be incremental, with only one newly file
        KvTestUtils.checkSnapshotIncrementWithNewlyFiles(
                completedSnapshot1.getKvSnapshotHandle(),
                completedSnapshot0.getKvSnapshotHandle(),
                1);

        // now, make the replica as follower to make kv can be destroyed
        makeKvReplicaAsFollower(kvReplica, 1);

        // make a new kv replica
        testKvSnapshotContext =
                new TestSnapshotContext(snapshotKvTabletDir.getPath(), kvSnapshotStore);
        kvReplica =
                makeKvReplica(DATA1_PHYSICAL_TABLE_PATH_PK, tableBucket, testKvSnapshotContext);
        scheduledExecutorService = testKvSnapshotContext.scheduledExecutorService;
        kvSnapshotStore = testKvSnapshotContext.testKvSnapshotStore;
        makeKvReplicaAsFollower(kvReplica, 1);
        // check the kv tablet should be null since it has become follower
        assertThat(kvReplica.getKvTablet()).isNull();

        // make as leader again, should restore from snapshot
        makeKvReplicaAsLeader(kvReplica, 2);
        // put some data
        kvRecords =
                genKvRecordBatch(
                        Tuple2.of("k2", new Object[] {4, "bk21"}),
                        Tuple2.of("k3", new Object[] {5, "k3"}));
        putRecordsToLeader(kvReplica, kvRecords);

        // trigger another one snapshot,
        scheduledExecutorService.triggerNonPeriodicScheduledTask();
        // wait until the snapshot 2 success
        CompletedSnapshot completedSnapshot2 =
                kvSnapshotStore.waitUntilSnapshotComplete(tableBucket, 2);
        expectedLogOffset = 10;
        expectedKeyValues =
                getKeyValuePairs(
                        genKvRecords(
                                Tuple2.of("k2", new Object[] {4, "bk21"}),
                                Tuple2.of("k3", new Object[] {5, "k3"})));
        KvTestUtils.checkSnapshot(completedSnapshot2, expectedKeyValues, expectedLogOffset);
    }

    /**
     * Verifies that a snapshot committed after a leader re-election carries the latest leader
     * epoch, not the epoch that was current when the snapshot task was scheduled.
     */
    @Test
    void testSnapshotUseLatestLeaderEpoch(@TempDir File snapshotKvTabletDir) throws Exception {
        TableBucket tableBucket = new TableBucket(DATA1_TABLE_ID_PK, 1);
        // create test context
        ImmediateTriggeredScheduledExecutorService immediateTriggeredScheduledExecutorService =
                new ImmediateTriggeredScheduledExecutorService();
        TestSnapshotContext testKvSnapshotContext =
                new TestSnapshotContext(
                        snapshotKvTabletDir.getPath(), immediateTriggeredScheduledExecutorService);
        TestingCompletedKvSnapshotCommitter kvSnapshotStore =
                testKvSnapshotContext.testKvSnapshotStore;
        // make a kv replica
        Replica kvReplica =
                makeKvReplica(DATA1_PHYSICAL_TABLE_PATH_PK, tableBucket, testKvSnapshotContext);
        // now, make the replica as leader
        makeKvReplicaAsLeader(kvReplica, 0);

        KvRecordBatch kvRecords =
                genKvRecordBatch(
                        Tuple2.of("k1", new Object[] {1, "a"}),
                        Tuple2.of("k2", new Object[] {2, "b"}));
        putRecordsToLeader(kvReplica, kvRecords);

        // make leader again with a new epoch, check the snapshot should use the new epoch
        immediateTriggeredScheduledExecutorService.reset();
        int latestLeaderEpoch = 1;
        int snapshot = 0;
        makeKvReplicaAsLeader(kvReplica, latestLeaderEpoch);
        kvSnapshotStore.waitUntilSnapshotComplete(tableBucket, snapshot);
        assertThat(kvSnapshotStore.getSnapshotLeaderEpoch(tableBucket, snapshot))
                .isEqualTo(latestLeaderEpoch);
    }

    /**
     * Verifies recovery when the latest snapshot's files are corrupted while its metadata in ZK is
     * still intact: re-initialization must detect the broken snapshot and fall back to the previous
     * good snapshot.
     */
    @Test
    void testBrokenSnapshotRecovery(@TempDir File snapshotKvTabletDir) throws Exception {
        TableBucket tableBucket = new TableBucket(DATA1_TABLE_ID_PK, 1);
        // create test context with custom snapshot store
        TestSnapshotContext testKvSnapshotContext =
                new TestSnapshotContext(snapshotKvTabletDir.getPath());
        ManuallyTriggeredScheduledExecutorService scheduledExecutorService =
                testKvSnapshotContext.scheduledExecutorService;
        TestingCompletedKvSnapshotCommitter kvSnapshotStore =
                testKvSnapshotContext.testKvSnapshotStore;
        // create a replica and make it leader
        Replica kvReplica =
                makeKvReplica(DATA1_PHYSICAL_TABLE_PATH_PK, tableBucket, testKvSnapshotContext);
        makeKvReplicaAsLeader(kvReplica);

        // put initial data and create first snapshot
        KvRecordBatch kvRecords =
                genKvRecordBatch(
                        Tuple2.of("k1", new Object[] {1, "a"}),
                        Tuple2.of("k2", new Object[] {2, "b"}));
        putRecordsToLeader(kvReplica, kvRecords);
        // trigger first snapshot
        scheduledExecutorService.triggerNonPeriodicScheduledTask();
        kvSnapshotStore.waitUntilSnapshotComplete(tableBucket, 0);

        // put more data and create second snapshot
        kvRecords =
                genKvRecordBatch(
                        Tuple2.of("k1", new Object[] {3, "c"}),
                        Tuple2.of("k3", new Object[] {4, "d"}));
        putRecordsToLeader(kvReplica, kvRecords);
        // trigger second snapshot
        scheduledExecutorService.triggerNonPeriodicScheduledTask();
        kvSnapshotStore.waitUntilSnapshotComplete(tableBucket, 1);

        // put more data and create third snapshot (this will be the broken one)
        kvRecords =
                genKvRecordBatch(
                        Tuple2.of("k4", new Object[] {5, "e"}),
                        Tuple2.of("k5", new Object[] {6, "f"}));
        putRecordsToLeader(kvReplica, kvRecords);
        // trigger third snapshot
        scheduledExecutorService.triggerNonPeriodicScheduledTask();
        CompletedSnapshot snapshot2 = kvSnapshotStore.waitUntilSnapshotComplete(tableBucket, 2);

        // verify that snapshot2 is the latest one before we break it
        assertThat(kvSnapshotStore.getLatestCompletedSnapshot(tableBucket).getSnapshotID())
                .isEqualTo(2);

        // now simulate the latest snapshot (snapshot2) being broken by
        // deleting its metadata files and unshared SST files
        // This simulates file corruption while ZK metadata remains intact
        snapshot2.getKvSnapshotHandle().discard();

        // ZK metadata should still show snapshot2 as latest (file corruption hasn't been detected
        // yet)
        assertThat(kvSnapshotStore.getLatestCompletedSnapshot(tableBucket).getSnapshotID())
                .isEqualTo(2);

        // make the replica follower to destroy the current kv tablet
        makeKvReplicaAsFollower(kvReplica, 1);

        // create a new replica with the same snapshot context
        // During initialization, it will try to use snapshot2 but find it broken,
        // then handle the broken snapshot and fall back to snapshot1
        testKvSnapshotContext =
                new TestSnapshotContext(snapshotKvTabletDir.getPath(), kvSnapshotStore);
        kvReplica =
                makeKvReplica(DATA1_PHYSICAL_TABLE_PATH_PK, tableBucket, testKvSnapshotContext);

        // make it leader again - this should trigger the broken snapshot recovery logic
        // The system should detect that snapshot2 files are missing, clean up its metadata,
        // and successfully recover using snapshot1
        makeKvReplicaAsLeader(kvReplica, 2);

        // verify that KvTablet is successfully initialized despite the broken snapshot
        assertThat(kvReplica.getKvTablet()).isNotNull();
        KvTablet kvTablet = kvReplica.getKvTablet();

        // verify that the data from snapshot1 is restored (snapshot2 was broken and cleaned up)
        // snapshot1 should contain: k1->3,c and k3->4,d
        List<Tuple2<byte[], byte[]>> expectedKeyValues =
                getKeyValuePairs(
                        genKvRecords(
                                Tuple2.of("k1", new Object[] {3, "c"}),
                                Tuple2.of("k3", new Object[] {4, "d"})));
        verifyGetKeyValues(kvTablet, expectedKeyValues);

        // Verify the core functionality: KvTablet successfully initialized despite broken snapshot
        // The key test is that the system can handle broken snapshots and recover correctly
        // Verify that we successfully simulated the broken snapshot condition
        File metadataFile = new File(snapshot2.getMetadataFilePath().getPath());
        assertThat(metadataFile.exists()).isFalse();
    }

    /**
     * Verifies restore behavior on repeated leader re-elections: first a restore purely from the
     * log (no snapshot taken yet), then a restore combining a snapshot with subsequent log entries.
     */
    @Test
    void testRestore(@TempDir Path snapshotKvTabletDirPath) throws Exception {
        TableBucket tableBucket = new TableBucket(DATA1_TABLE_ID_PK, 1);
        TestSnapshotContext testKvSnapshotContext =
                new TestSnapshotContext(snapshotKvTabletDirPath.toString());
        ManuallyTriggeredScheduledExecutorService scheduledExecutorService =
                testKvSnapshotContext.scheduledExecutorService;
        TestingCompletedKvSnapshotCommitter kvSnapshotStore =
                testKvSnapshotContext.testKvSnapshotStore;
        // make a kv replica
        Replica kvReplica =
                makeKvReplica(DATA1_PHYSICAL_TABLE_PATH_PK, tableBucket, testKvSnapshotContext);
        makeKvReplicaAsLeader(kvReplica);

        putRecordsToLeader(
                kvReplica,
                DataTestUtils.genKvRecordBatch(new Object[] {1, "a"}, new Object[] {2, "b"}));

        makeKvReplicaAsFollower(kvReplica, 1);
        // make a kv replica again, should restore from log
        makeKvReplicaAsLeader(kvReplica, 2);
        assertThat(kvReplica.getKvTablet()).isNotNull();
        KvTablet kvTablet = kvReplica.getKvTablet();
        // check result
        List<Tuple2<byte[], byte[]>> expectedKeyValues =
                getKeyValuePairs(genKvRecords(new Object[] {1, "a"}, new Object[] {2, "b"}));
        verifyGetKeyValues(kvTablet, expectedKeyValues);

        // We have to remove the first scheduled snapshot task since it's for the previous kv tablet
        // whose rocksdb has been dropped.
        scheduledExecutorService.removeNonPeriodicScheduledTask();
        // trigger one snapshot,
        scheduledExecutorService.triggerNonPeriodicScheduledTask();
        // wait until the snapshot success
        kvSnapshotStore.waitUntilSnapshotComplete(tableBucket, 0);

        // write data again
        putRecordsToLeader(
                kvReplica,
                DataTestUtils.genKvRecordBatch(new Object[] {2, "bbb"}, new Object[] {3, "c"}));

        // restore again
        makeKvReplicaAsLeader(kvReplica, 3);
        expectedKeyValues =
                getKeyValuePairs(
                        genKvRecords(
                                new Object[] {1, "a"},
                                new Object[] {2, "bbb"},
                                new Object[] {3, "c"}));
        kvTablet = kvReplica.getKvTablet();
        verifyGetKeyValues(kvTablet, expectedKeyValues);
    }

    /** Makes the given log-table replica leader of DATA1's bucket 1 at the initial epoch. */
    private void makeLogReplicaAsLeader(Replica replica) throws Exception {
        makeLeaderReplica(
                replica, DATA1_TABLE_PATH, new TableBucket(DATA1_TABLE_ID, 1), INITIAL_LEADER_EPOCH);
    }

    /** Makes the given kv-table replica leader of the PK table's bucket 1 at the initial epoch. */
    private void makeKvReplicaAsLeader(Replica replica) throws Exception {
        makeLeaderReplica(
                replica,
                DATA1_TABLE_PATH_PK,
                new TableBucket(DATA1_TABLE_ID_PK, 1),
                INITIAL_LEADER_EPOCH);
    }

    /** Makes the given kv-table replica leader of the PK table's bucket 1 at the given epoch. */
    private void makeKvReplicaAsLeader(Replica replica, int leaderEpoch) throws Exception {
        makeLeaderReplica(
                replica, DATA1_TABLE_PATH_PK, new TableBucket(DATA1_TABLE_ID_PK, 1), leaderEpoch);
    }

    /** Demotes the given kv-table replica to follower at the given leader epoch. */
    private void makeKvReplicaAsFollower(Replica replica, int leaderEpoch) {
        replica.makeFollower(
                new NotifyLeaderAndIsrData(
                        PhysicalTablePath.of(DATA1_TABLE_PATH_PK),
                        new TableBucket(DATA1_TABLE_ID_PK, 1),
                        Collections.singletonList(TABLET_SERVER_ID),
                        new LeaderAndIsr(
                                TABLET_SERVER_ID,
                                leaderEpoch,
                                Collections.singletonList(TABLET_SERVER_ID),
                                INITIAL_COORDINATOR_EPOCH,
                                // we also use the leader epoch as bucket epoch
                                leaderEpoch)));
    }

    /**
     * Promotes {@code replica} to leader of {@code tableBucket} at {@code leaderEpoch}, with this
     * tablet server as the only member of the ISR.
     */
    private void makeLeaderReplica(
            Replica replica, TablePath tablePath, TableBucket tableBucket, int leaderEpoch)
            throws Exception {
        replica.makeLeader(
                new NotifyLeaderAndIsrData(
                        PhysicalTablePath.of(tablePath),
                        tableBucket,
                        Collections.singletonList(TABLET_SERVER_ID),
                        new LeaderAndIsr(
                                TABLET_SERVER_ID,
                                leaderEpoch,
                                Collections.singletonList(TABLET_SERVER_ID),
                                INITIAL_COORDINATOR_EPOCH,
                                // we also use the leader epoch as bucket epoch
                                leaderEpoch)));
    }

    /** Fetches all records of the replica starting from offset 0. */
    private static LogRecords fetchRecords(Replica replica) throws IOException {
        return fetchRecords(replica, 0);
    }

    /** Fetches all records of the replica starting from the given log offset. */
    private static LogRecords fetchRecords(Replica replica, long offset) throws IOException {
        FetchParams fetchParams = new FetchParams(-1, Integer.MAX_VALUE);
        fetchParams.setCurrentFetch(
                replica.getTableBucket().getTableId(),
                offset,
                Integer.MAX_VALUE,
                replica.getRowType(),
                DEFAULT_COMPRESSION,
                null);
        LogReadInfo logReadInfo = replica.fetchRecords(fetchParams);
        return logReadInfo.getFetchedData().getRecords();
    }

    /**
     * Builds the expected change-log records ({@code changeTypes} zipped with {@code values})
     * starting at {@code baseOffset}, using the DATA1 schema and Arrow log format.
     */
    private static MemoryLogRecords logRecords(
            long baseOffset, List<ChangeType> changeTypes, List<Object[]> values)
            throws Exception {
        return createBasicMemoryLogRecords(
                DATA1_ROW_TYPE,
                DEFAULT_SCHEMA_ID,
                baseOffset,
                -1L,
                CURRENT_LOG_MAGIC_VALUE,
                NO_WRITER_ID,
                NO_BATCH_SEQUENCE,
                changeTypes,
                values,
                LogFormat.ARROW,
                DEFAULT_COMPRESSION);
    }

    /**
     * Puts a kv batch to the leader (optionally as a partial update over {@code targetColumns})
     * and flushes the kv tablet so that the written data is visible to reads.
     */
    private LogAppendInfo putRecordsToLeader(
            Replica replica, KvRecordBatch kvRecords, int[] targetColumns) throws Exception {
        LogAppendInfo logAppendInfo = replica.putRecordsToLeader(kvRecords, targetColumns, 0);
        KvTablet kvTablet = checkNotNull(replica.getKvTablet());
        // flush to make data visible
        kvTablet.flush(replica.getLocalLogEndOffset(), NOPErrorHandler.INSTANCE);
        return logAppendInfo;
    }

    /** Puts a full-row kv batch (no column projection) to the leader and flushes. */
    private LogAppendInfo putRecordsToLeader(Replica replica, KvRecordBatch kvRecords)
            throws Exception {
        return putRecordsToLeader(replica, kvRecords, null);
    }

    /**
     * Asserts that a multi-get of the expected keys against the kv tablet returns exactly the
     * expected values, in order.
     */
    private void verifyGetKeyValues(
            KvTablet kvTablet, List<Tuple2<byte[], byte[]>> expectedKeyValues) throws IOException {
        List<byte[]> keys = new ArrayList<>();
        List<byte[]> expectValues = new ArrayList<>();
        for (Tuple2<byte[], byte[]> expectedKeyValue : expectedKeyValues) {
            keys.add(expectedKeyValue.f0);
            expectValues.add(expectedKeyValue.f1);
        }
        assertThat(kvTablet.multiGet(keys)).containsExactlyElementsOf(expectValues);
    }

    /** A scheduledExecutorService that will execute the scheduled task immediately. */
    private static class ImmediateTriggeredScheduledExecutorService
            extends ManuallyTriggeredScheduledExecutorService {

        // whether the single allowed task has already been scheduled
        private boolean isScheduled = false;

        @Override
        public ScheduledFuture<?> schedule(Runnable command, long delay, TimeUnit unit) {
            // we only schedule task for once, if has scheduled, return null to skip schedule
            // the task
            if (isScheduled) {
                return null;
            }
            isScheduled = true;
            ScheduledFuture<?> scheduledFuture = super.schedule(command, delay, unit);
            triggerNonPeriodicScheduledTask();
            return scheduledFuture;
        }

        /** Allows one more task to be scheduled (and immediately triggered). */
        public void reset() {
            isScheduled = false;
        }
    }
}
googleapis/google-cloud-java
35,956
java-container/proto-google-cloud-container-v1/src/main/java/com/google/container/v1/ListClustersRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/container/v1/cluster_service.proto // Protobuf Java Version: 3.25.8 package com.google.container.v1; /** * * * <pre> * ListClustersRequest lists clusters. * </pre> * * Protobuf type {@code google.container.v1.ListClustersRequest} */ public final class ListClustersRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.container.v1.ListClustersRequest) ListClustersRequestOrBuilder { private static final long serialVersionUID = 0L; // Use ListClustersRequest.newBuilder() to construct. 
private ListClustersRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private ListClustersRequest() { projectId_ = ""; zone_ = ""; parent_ = ""; } @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new ListClustersRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_ListClustersRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_ListClustersRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1.ListClustersRequest.class, com.google.container.v1.ListClustersRequest.Builder.class); } public static final int PROJECT_ID_FIELD_NUMBER = 1; @SuppressWarnings("serial") private volatile java.lang.Object projectId_ = ""; /** * * * <pre> * Deprecated. The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the parent field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4099 * @return The projectId. */ @java.lang.Override @java.lang.Deprecated public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } } /** * * * <pre> * Deprecated. 
The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the parent field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4099 * @return The bytes for projectId. */ @java.lang.Override @java.lang.Deprecated public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int ZONE_FIELD_NUMBER = 2; @SuppressWarnings("serial") private volatile java.lang.Object zone_ = ""; /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * in which the cluster resides, or "-" for all zones. This field has been * deprecated and replaced by the parent field. * </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4105 * @return The zone. */ @java.lang.Override @java.lang.Deprecated public java.lang.String getZone() { java.lang.Object ref = zone_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); zone_ = s; return s; } } /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * in which the cluster resides, or "-" for all zones. This field has been * deprecated and replaced by the parent field. 
* </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4105 * @return The bytes for zone. */ @java.lang.Override @java.lang.Deprecated public com.google.protobuf.ByteString getZoneBytes() { java.lang.Object ref = zone_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); zone_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } public static final int PARENT_FIELD_NUMBER = 4; @SuppressWarnings("serial") private volatile java.lang.Object parent_ = ""; /** * * * <pre> * The parent (project and location) where the clusters will be listed. * Specified in the format `projects/&#42;&#47;locations/&#42;`. * Location "-" matches all zones and all regions. * </pre> * * <code>string parent = 4;</code> * * @return The parent. */ @java.lang.Override public java.lang.String getParent() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { return (java.lang.String) ref; } else { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } } /** * * * <pre> * The parent (project and location) where the clusters will be listed. * Specified in the format `projects/&#42;&#47;locations/&#42;`. * Location "-" matches all zones and all regions. * </pre> * * <code>string parent = 4;</code> * * @return The bytes for parent. 
*/ @java.lang.Override public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof java.lang.String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 1, projectId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 2, zone_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { com.google.protobuf.GeneratedMessageV3.writeString(output, 4, parent_); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(projectId_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(1, projectId_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(zone_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(2, zone_); } if (!com.google.protobuf.GeneratedMessageV3.isStringEmpty(parent_)) { size += com.google.protobuf.GeneratedMessageV3.computeStringSize(4, parent_); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof 
com.google.container.v1.ListClustersRequest)) { return super.equals(obj); } com.google.container.v1.ListClustersRequest other = (com.google.container.v1.ListClustersRequest) obj; if (!getProjectId().equals(other.getProjectId())) return false; if (!getZone().equals(other.getZone())) return false; if (!getParent().equals(other.getParent())) return false; if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + getDescriptor().hashCode(); hash = (37 * hash) + PROJECT_ID_FIELD_NUMBER; hash = (53 * hash) + getProjectId().hashCode(); hash = (37 * hash) + ZONE_FIELD_NUMBER; hash = (53 * hash) + getZone().hashCode(); hash = (37 * hash) + PARENT_FIELD_NUMBER; hash = (53 * hash) + getParent().hashCode(); hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.container.v1.ListClustersRequest parseFrom(java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1.ListClustersRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.ListClustersRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1.ListClustersRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.ListClustersRequest parseFrom(byte[] data) throws 
com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.container.v1.ListClustersRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.container.v1.ListClustersRequest parseFrom(java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1.ListClustersRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1.ListClustersRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.container.v1.ListClustersRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.container.v1.ListClustersRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.container.v1.ListClustersRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public 
static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder(com.google.container.v1.ListClustersRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * ListClustersRequest lists clusters. * </pre> * * Protobuf type {@code google.container.v1.ListClustersRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.container.v1.ListClustersRequest) com.google.container.v1.ListClustersRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_ListClustersRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.container.v1.ClusterServiceProto .internal_static_google_container_v1_ListClustersRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.container.v1.ListClustersRequest.class, com.google.container.v1.ListClustersRequest.Builder.class); } // Construct using com.google.container.v1.ListClustersRequest.newBuilder() private Builder() {} private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; projectId_ = ""; zone_ = ""; parent_ = ""; return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.container.v1.ClusterServiceProto 
.internal_static_google_container_v1_ListClustersRequest_descriptor; } @java.lang.Override public com.google.container.v1.ListClustersRequest getDefaultInstanceForType() { return com.google.container.v1.ListClustersRequest.getDefaultInstance(); } @java.lang.Override public com.google.container.v1.ListClustersRequest build() { com.google.container.v1.ListClustersRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.container.v1.ListClustersRequest buildPartial() { com.google.container.v1.ListClustersRequest result = new com.google.container.v1.ListClustersRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0(com.google.container.v1.ListClustersRequest result) { int from_bitField0_ = bitField0_; if (((from_bitField0_ & 0x00000001) != 0)) { result.projectId_ = projectId_; } if (((from_bitField0_ & 0x00000002) != 0)) { result.zone_ = zone_; } if (((from_bitField0_ & 0x00000004) != 0)) { result.parent_ = parent_; } } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } 
@java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.container.v1.ListClustersRequest) { return mergeFrom((com.google.container.v1.ListClustersRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.container.v1.ListClustersRequest other) { if (other == com.google.container.v1.ListClustersRequest.getDefaultInstance()) return this; if (!other.getProjectId().isEmpty()) { projectId_ = other.projectId_; bitField0_ |= 0x00000001; onChanged(); } if (!other.getZone().isEmpty()) { zone_ = other.zone_; bitField0_ |= 0x00000002; onChanged(); } if (!other.getParent().isEmpty()) { parent_ = other.parent_; bitField0_ |= 0x00000004; onChanged(); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if (extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { projectId_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000001; break; } // case 10 case 18: { zone_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000002; break; } // case 18 case 34: { parent_ = input.readStringRequireUtf8(); bitField0_ |= 0x00000004; break; } // case 34 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private java.lang.Object projectId_ = ""; /** * * * <pre> * Deprecated. 
The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the parent field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4099 * @return The projectId. */ @java.lang.Deprecated public java.lang.String getProjectId() { java.lang.Object ref = projectId_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); projectId_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Deprecated. The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the parent field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4099 * @return The bytes for projectId. */ @java.lang.Deprecated public com.google.protobuf.ByteString getProjectIdBytes() { java.lang.Object ref = projectId_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); projectId_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Deprecated. The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the parent field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.project_id is deprecated. 
See * google/container/v1/cluster_service.proto;l=4099 * @param value The projectId to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setProjectId(java.lang.String value) { if (value == null) { throw new NullPointerException(); } projectId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Deprecated. The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the parent field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4099 * @return This builder for chaining. */ @java.lang.Deprecated public Builder clearProjectId() { projectId_ = getDefaultInstance().getProjectId(); bitField0_ = (bitField0_ & ~0x00000001); onChanged(); return this; } /** * * * <pre> * Deprecated. The Google Developers Console [project ID or project * number](https://cloud.google.com/resource-manager/docs/creating-managing-projects). * This field has been deprecated and replaced by the parent field. * </pre> * * <code>string project_id = 1 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.project_id is deprecated. See * google/container/v1/cluster_service.proto;l=4099 * @param value The bytes for projectId to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setProjectIdBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); projectId_ = value; bitField0_ |= 0x00000001; onChanged(); return this; } private java.lang.Object zone_ = ""; /** * * * <pre> * Deprecated. 
The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * in which the cluster resides, or "-" for all zones. This field has been * deprecated and replaced by the parent field. * </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4105 * @return The zone. */ @java.lang.Deprecated public java.lang.String getZone() { java.lang.Object ref = zone_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); zone_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * in which the cluster resides, or "-" for all zones. This field has been * deprecated and replaced by the parent field. * </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4105 * @return The bytes for zone. */ @java.lang.Deprecated public com.google.protobuf.ByteString getZoneBytes() { java.lang.Object ref = zone_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); zone_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * in which the cluster resides, or "-" for all zones. This field has been * deprecated and replaced by the parent field. * </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.zone is deprecated. 
See * google/container/v1/cluster_service.proto;l=4105 * @param value The zone to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setZone(java.lang.String value) { if (value == null) { throw new NullPointerException(); } zone_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * in which the cluster resides, or "-" for all zones. This field has been * deprecated and replaced by the parent field. * </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4105 * @return This builder for chaining. */ @java.lang.Deprecated public Builder clearZone() { zone_ = getDefaultInstance().getZone(); bitField0_ = (bitField0_ & ~0x00000002); onChanged(); return this; } /** * * * <pre> * Deprecated. The name of the Google Compute Engine * [zone](https://cloud.google.com/compute/docs/zones#available) * in which the cluster resides, or "-" for all zones. This field has been * deprecated and replaced by the parent field. * </pre> * * <code>string zone = 2 [deprecated = true];</code> * * @deprecated google.container.v1.ListClustersRequest.zone is deprecated. See * google/container/v1/cluster_service.proto;l=4105 * @param value The bytes for zone to set. * @return This builder for chaining. */ @java.lang.Deprecated public Builder setZoneBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); zone_ = value; bitField0_ |= 0x00000002; onChanged(); return this; } private java.lang.Object parent_ = ""; /** * * * <pre> * The parent (project and location) where the clusters will be listed. * Specified in the format `projects/&#42;&#47;locations/&#42;`. * Location "-" matches all zones and all regions. 
* </pre> * * <code>string parent = 4;</code> * * @return The parent. */ public java.lang.String getParent() { java.lang.Object ref = parent_; if (!(ref instanceof java.lang.String)) { com.google.protobuf.ByteString bs = (com.google.protobuf.ByteString) ref; java.lang.String s = bs.toStringUtf8(); parent_ = s; return s; } else { return (java.lang.String) ref; } } /** * * * <pre> * The parent (project and location) where the clusters will be listed. * Specified in the format `projects/&#42;&#47;locations/&#42;`. * Location "-" matches all zones and all regions. * </pre> * * <code>string parent = 4;</code> * * @return The bytes for parent. */ public com.google.protobuf.ByteString getParentBytes() { java.lang.Object ref = parent_; if (ref instanceof String) { com.google.protobuf.ByteString b = com.google.protobuf.ByteString.copyFromUtf8((java.lang.String) ref); parent_ = b; return b; } else { return (com.google.protobuf.ByteString) ref; } } /** * * * <pre> * The parent (project and location) where the clusters will be listed. * Specified in the format `projects/&#42;&#47;locations/&#42;`. * Location "-" matches all zones and all regions. * </pre> * * <code>string parent = 4;</code> * * @param value The parent to set. * @return This builder for chaining. */ public Builder setParent(java.lang.String value) { if (value == null) { throw new NullPointerException(); } parent_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } /** * * * <pre> * The parent (project and location) where the clusters will be listed. * Specified in the format `projects/&#42;&#47;locations/&#42;`. * Location "-" matches all zones and all regions. * </pre> * * <code>string parent = 4;</code> * * @return This builder for chaining. */ public Builder clearParent() { parent_ = getDefaultInstance().getParent(); bitField0_ = (bitField0_ & ~0x00000004); onChanged(); return this; } /** * * * <pre> * The parent (project and location) where the clusters will be listed. 
* Specified in the format `projects/&#42;&#47;locations/&#42;`. * Location "-" matches all zones and all regions. * </pre> * * <code>string parent = 4;</code> * * @param value The bytes for parent to set. * @return This builder for chaining. */ public Builder setParentBytes(com.google.protobuf.ByteString value) { if (value == null) { throw new NullPointerException(); } checkByteStringIsUtf8(value); parent_ = value; bitField0_ |= 0x00000004; onChanged(); return this; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.container.v1.ListClustersRequest) } // @@protoc_insertion_point(class_scope:google.container.v1.ListClustersRequest) private static final com.google.container.v1.ListClustersRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.container.v1.ListClustersRequest(); } public static com.google.container.v1.ListClustersRequest getDefaultInstance() { return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<ListClustersRequest> PARSER = new com.google.protobuf.AbstractParser<ListClustersRequest>() { @java.lang.Override public ListClustersRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch 
(java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<ListClustersRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<ListClustersRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.container.v1.ListClustersRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
apache/geode
35,241
extensions/geode-modules-session/src/integrationTest/java/org/apache/geode/modules/session/internal/filter/SessionReplicationIntegrationJUnitTest.java
/* * Licensed to the Apache Software Foundation (ASF) under one or more contributor license * agreements. See the NOTICE file distributed with this work for additional information regarding * copyright ownership. The ASF licenses this file to You under the Apache License, Version 2.0 (the * "License"); you may not use this file except in compliance with the License. You may obtain a * copy of the License at * * http://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software distributed under the License * is distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express * or implied. See the License for the specific language governing permissions and limitations under * the License. */ package org.apache.geode.modules.session.internal.filter; import static org.junit.Assert.assertEquals; import static org.junit.Assert.assertFalse; import static org.junit.Assert.assertNotNull; import static org.junit.Assert.assertNull; import static org.junit.Assert.assertTrue; import java.io.File; import java.io.PrintWriter; import java.util.ArrayList; import java.util.EnumSet; import java.util.Enumeration; import java.util.List; import java.util.StringTokenizer; import javax.servlet.DispatcherType; import javax.servlet.RequestDispatcher; import javax.servlet.http.Cookie; import javax.servlet.http.HttpSession; import com.meterware.httpunit.GetMethodWebRequest; import com.meterware.httpunit.WebConversation; import com.meterware.httpunit.WebRequest; import com.meterware.httpunit.WebResponse; import org.apache.jasper.servlet.JspServlet; import org.eclipse.jetty.http.HttpTester; import org.eclipse.jetty.servlet.FilterHolder; import org.eclipse.jetty.servlet.ServletHolder; import org.junit.After; import org.junit.Assume; import org.junit.Before; import org.junit.Rule; import org.junit.Test; import org.junit.experimental.categories.Category; import org.junit.rules.TemporaryFolder; import 
org.junit.runner.RunWith; import org.apache.geode.cache.Region; import org.apache.geode.modules.session.filter.SessionCachingFilter; import org.apache.geode.modules.session.junit.PerTestClassLoaderRunner; import org.apache.geode.test.junit.categories.SessionTest; import org.apache.geode.util.internal.GeodeGlossary; /** * In-container testing using Jetty. This allows us to test context listener events as well as * dispatching actions. */ @Category({SessionTest.class}) @RunWith(PerTestClassLoaderRunner.class) public class SessionReplicationIntegrationJUnitTest { private MyServletTester tester; private HttpTester.Request request; private HttpTester.Response response; private ServletHolder servletHolder; private FilterHolder filterHolder; @Rule public TemporaryFolder tmpdir = new TemporaryFolder(); @Before public void setUp() throws Exception { File gemfireLogFile = new File(tmpdir.newFolder(), "gemfire_modules.log"); request = HttpTester.newRequest(); tester = new MyServletTester(); tester.setContextPath("/test"); Assume.assumeFalse(System.getProperty("os.name").toLowerCase().contains("win")); filterHolder = tester.addFilter(SessionCachingFilter.class, "/*", EnumSet.of(DispatcherType.REQUEST)); filterHolder.setInitParameter(GeodeGlossary.GEMFIRE_PREFIX + "property.mcast-port", "0"); filterHolder.setInitParameter(GeodeGlossary.GEMFIRE_PREFIX + "property.log-file", gemfireLogFile.getAbsolutePath()); filterHolder.setInitParameter("cache-type", "peer-to-peer"); servletHolder = tester.addServlet(BasicServlet.class, "/hello"); servletHolder.setInitParameter("test.callback", "callback_1"); /* * This starts the servlet. Our wrapped servlets *must* start immediately otherwise the * ServletContext is not captured correctly. 
*/ servletHolder.setInitOrder(0); } @After public void tearDown() throws Exception { tester.stop(); } @Test public void testSanity() throws Exception { Callback c = (request, response) -> { PrintWriter out = response.getWriter(); out.write("Hello World"); }; tester.setAttribute("callback_1", c); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals(200, response.getStatus()); assertEquals("Hello World", response.getContent()); } @Test public void testSessionGenerated() throws Exception { Callback c = (request, response) -> { PrintWriter out = response.getWriter(); out.write(request.getSession().getId()); }; tester.setAttribute("callback_1", c); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertTrue("Not a correctly generated session id", response.getContent().endsWith("-GF")); List<Cookie> cookies = getCookies(response); assertEquals("Session id != JSESSIONID from cookie", response.getContent(), getSessionCookie(cookies).getValue()); Region r = getRegion(); assertNotNull("Session not found in region", r.get(getSessionCookie(cookies).getValue())); } private Cookie getSessionCookie(List<Cookie> cookies) { return cookies.stream().filter(cookie -> cookie.getName().equals("JSESSIONID")) .reduce((oldCookie, cookie) -> cookie).get(); } /** * Test that getSession(false) does not create a new session */ @Test public void testSessionNotGenerated() throws Exception { Callback c = (request, response) -> { String output = "OK"; HttpSession s = request.getSession(false); if (s != null) { output = s.getId(); } PrintWriter out = response.getWriter(); out.write(output); }; tester.setAttribute("callback_1", c); tester.start(); 
request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("Session should not have been created", "OK", response.getContent()); } @Test public void testUnknownAttributeIsNull() throws Exception { Callback c = (request, response) -> { Object o = request.getSession().getAttribute("unknown"); PrintWriter out = response.getWriter(); if (o == null) { out.write("null"); } else { out.write(o.toString()); } }; tester.setAttribute("callback_1", c); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("Unknown attribute should be null", "null", response.getContent()); } @Test public void testSessionRemains1() throws Exception { Callback c = (request, response) -> { String output; HttpSession session = request.getSession(); if (session.isNew()) { output = "new"; session.setAttribute("foo", output); } else { output = (String) session.getAttribute("foo"); if (output != null) { output = "old"; } } PrintWriter out = response.getWriter(); out.write(output); }; tester.setAttribute("callback_1", c); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("Session should be new", "new", response.getContent()); List<Cookie> cookies = getCookies(response); request.setHeader("Cookie", "JSESSIONID=" + getSessionCookie(cookies).getValue()); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("Session should be old", "old", response.getContent()); List<Cookie> cookies2 = getCookies(response); assertEquals("Session IDs should be the same", 
getSessionCookie(cookies).getValue(), getSessionCookie(cookies2).getValue()); Region r = getRegion(); assertNotNull("Session object should exist in region", r.get(getSessionCookie(cookies).getValue())); } /** * Test that attributes are updated on the backend */ @Test public void testAttributesUpdatedInRegion() throws Exception { Callback c_1 = (request, response) -> request.getSession().setAttribute("foo", "bar"); // This is the callback used to invalidate the session Callback c_2 = (request, response) -> request.getSession().setAttribute("foo", "baz"); tester.setAttribute("callback_1", c_1); tester.setAttribute("callback_2", c_2); servletHolder.setInitParameter("test.callback", "callback_1"); ServletHolder sh2 = tester.addServlet(BasicServlet.class, "/request2"); sh2.setInitParameter("test.callback", "callback_2"); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); List<Cookie> cookies = getCookies(response); Region r = getRegion(); assertEquals("bar", ((HttpSession) r.get(getSessionCookie(cookies).getValue())).getAttribute("foo")); request.setHeader("Cookie", "JSESSIONID=" + getSessionCookie(cookies).getValue()); request.setURI("/test/request2"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("baz", ((HttpSession) r.get(getSessionCookie(cookies).getValue())).getAttribute("foo")); } /** * Test setting an attribute to null deletes it */ @Test public void testSetAttributeNullDeletesIt() throws Exception { Callback c_1 = (request, response) -> request.getSession().setAttribute("foo", "bar"); // This is the callback used to invalidate the session Callback c_2 = (request, response) -> request.getSession().setAttribute("foo", null); tester.setAttribute("callback_1", c_1); tester.setAttribute("callback_2", c_2); servletHolder.setInitParameter("test.callback", 
"callback_1"); ServletHolder sh2 = tester.addServlet(BasicServlet.class, "/request2"); sh2.setInitParameter("test.callback", "callback_2"); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); List<Cookie> cookies = getCookies(response); Region r = getRegion(); assertEquals("bar", ((HttpSession) r.get(getSessionCookie(cookies).getValue())).getAttribute("foo")); request.setHeader("Cookie", "JSESSIONID=" + getSessionCookie(cookies).getValue()); request.setURI("/test/request2"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertNull(((HttpSession) r.get(getSessionCookie(cookies).getValue())).getAttribute("foo")); } /** * Test that a servlet can modify cookies */ @Test public void testUserCanModifyTheirOwnCookie() throws Exception { Callback c = (request, response) -> { Cookie userCookie = findUserCookie(request.getCookies()); if (userCookie == null) { userCookie = new Cookie("myCookie", "0"); } else { userCookie = new Cookie("myCookie", Integer.toString(Integer.parseInt(userCookie.getValue()) + 1)); } response.addCookie(userCookie); request.getSession().setAttribute("dummy", "value"); }; tester.setAttribute("callback_1", c); String url = tester.createConnector(true); tester.start(); WebConversation wc = new WebConversation(); WebRequest req = new GetMethodWebRequest(url + "/test/hello"); req.setHeaderField("Cookie", "myCookie=" + 5); final WebResponse webResponse = wc.getResponse(req); assertEquals("6", webResponse.getNewCookieValue("myCookie")); } private Cookie findUserCookie(Cookie[] cookies) { if (cookies == null) { return null; } Cookie userCookie = null; for (Cookie cookie : cookies) { if (cookie.getName().equals("myCookie")) { userCookie = cookie; } } return userCookie; } /** * Test that invalidating a session destroys it as well as the backend object. 
*/ @Test public void testInvalidateSession1() throws Exception { Callback c_1 = (request, response) -> request.getSession().setAttribute("foo", "bar"); // This is the callback used to invalidate the session Callback c_2 = (request, response) -> request.getSession(false).invalidate(); tester.setAttribute("callback_1", c_1); tester.setAttribute("callback_2", c_2); servletHolder.setInitParameter("test.callback", "callback_1"); ServletHolder sh2 = tester.addServlet(BasicServlet.class, "/request2"); sh2.setInitParameter("test.callback", "callback_2"); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); List<Cookie> cookies = getCookies(response); Region r = getRegion(); assertEquals("bar", ((HttpSession) r.get(getSessionCookie(cookies).getValue())).getAttribute("foo")); request.setHeader("Cookie", "JSESSIONID=" + getSessionCookie(cookies).getValue()); request.setURI("/test/request2"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertNull("Region should not contain session", r.get(getSessionCookie(cookies).getValue())); } /** * Test that invalidating a session throws an exception on subsequent access. 
*/ @Test public void testInvalidateSession2() throws Exception { Callback c_1 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); PrintWriter out = response.getWriter(); try { s.getAttribute("foo"); } catch (IllegalStateException iex) { out.write("OK"); } }; tester.setAttribute("callback_1", c_1); servletHolder.setInitParameter("test.callback", "callback_1"); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("OK", response.getContent()); } /** * Test that invalidating a session throws an exception on subsequent access. */ @Test public void testInvalidateSession3() throws Exception { Callback c_1 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); PrintWriter out = response.getWriter(); try { s.getAttributeNames(); } catch (IllegalStateException iex) { out.write("OK"); } }; tester.setAttribute("callback_1", c_1); servletHolder.setInitParameter("test.callback", "callback_1"); tester.start(); // ContextManager.getInstance().putContext( // servletHolder.getServlet().getServletConfig().getServletContext()); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("OK", response.getContent()); } /** * Test that invalidating a session throws an exception on subsequent access. 
*/ @Test public void testInvalidateSession4() throws Exception { Callback c_1 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); PrintWriter out = response.getWriter(); try { s.getCreationTime(); } catch (IllegalStateException iex) { out.write("OK"); } }; tester.setAttribute("callback_1", c_1); servletHolder.setInitParameter("test.callback", "callback_1"); tester.start(); // ContextManager.getInstance().putContext( // servletHolder.getServlet().getServletConfig().getServletContext()); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("OK", response.getContent()); } /** * Test that invalidating a session does not throw an exception for subsequent getId calls. */ @Test public void testInvalidateSession5() throws Exception { Callback c_1 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); s.getId(); PrintWriter out = response.getWriter(); out.write("OK"); }; tester.setAttribute("callback_1", c_1); servletHolder.setInitParameter("test.callback", "callback_1"); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("OK", response.getContent()); } /** * Test that invalidating a session throws an exception on subsequent access. 
*/ @Test public void testInvalidateSession6() throws Exception { Callback c_1 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); PrintWriter out = response.getWriter(); try { s.getLastAccessedTime(); } catch (IllegalStateException iex) { out.write("OK"); } }; tester.setAttribute("callback_1", c_1); servletHolder.setInitParameter("test.callback", "callback_1"); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("OK", response.getContent()); } /** * Test that invalidating a session does not throw an exception for subsequent * getMaxInactiveInterval calls. */ // I've commented this out for now as Jetty seems to want to throw an // Exception here where the HttpServlet api doesn't specify that. @Test public void testInvalidateSession7() throws Exception { Callback c_1 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); s.getMaxInactiveInterval(); PrintWriter out = response.getWriter(); out.write("OK"); }; tester.setAttribute("callback_1", c_1); servletHolder.setInitParameter("test.callback", "callback_1"); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("OK", response.getContent()); } /** * Test that invalidating a session does not throw an exception for subsequent getServletContext * calls. 
*/ @Test public void testInvalidateSession8() throws Exception { Callback c_1 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); s.getServletContext(); PrintWriter out = response.getWriter(); out.write("OK"); }; tester.setAttribute("callback_1", c_1); servletHolder.setInitParameter("test.callback", "callback_1"); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("OK", response.getContent()); } /** * Test that invalidating a session throws an exception on subsequent access. */ @Test public void testInvalidateSession9() throws Exception { Callback c_1 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); PrintWriter out = response.getWriter(); try { s.isNew(); } catch (IllegalStateException iex) { out.write("OK"); } }; tester.setAttribute("callback_1", c_1); servletHolder.setInitParameter("test.callback", "callback_1"); String url = tester.createConnector(true); tester.start(); WebConversation wc = new WebConversation(); WebRequest req = new GetMethodWebRequest(url + "/test/hello"); req.setHeaderField("Host", "tester"); final WebResponse webResponse = wc.getResponse(req); assertEquals("OK", webResponse.getResponseMessage()); } /** * Test that invalidating a session throws an exception on subsequent access. 
*/ @Test public void testInvalidateSession10() throws Exception { Callback c_1 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); PrintWriter out = response.getWriter(); try { s.removeAttribute("foo"); } catch (IllegalStateException iex) { out.write("OK"); } }; tester.setAttribute("callback_1", c_1); servletHolder.setInitParameter("test.callback", "callback_1"); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("OK", response.getContent()); } /** * Test that invalidating a session throws an exception on subsequent access. */ @Test public void testInvalidateSession11() throws Exception { Callback c_1 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); PrintWriter out = response.getWriter(); try { s.setAttribute("foo", "bar"); } catch (IllegalStateException iex) { out.write("OK"); } }; tester.setAttribute("callback_1", c_1); servletHolder.setInitParameter("test.callback", "callback_1"); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("OK", response.getContent()); } /** * Test that invalidating a session does not throw an exception for subsequent * setMaxInactiveInterval calls. 
*/ @Test public void testInvalidateSession12() throws Exception { Callback c_1 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); s.setMaxInactiveInterval(1); PrintWriter out = response.getWriter(); out.write("OK"); }; tester.setAttribute("callback_1", c_1); servletHolder.setInitParameter("test.callback", "callback_1"); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("OK", response.getContent()); } /** * Test that invalidating a session results in null being returned on subsequent getSession(false) * calls. */ @Test public void testInvalidateSession13() throws Exception { Callback c_1 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); s = request.getSession(false); PrintWriter out = response.getWriter(); if (s == null) { out.write("OK"); } else { out.write(s.toString()); } }; tester.setAttribute("callback_1", c_1); servletHolder.setInitParameter("test.callback", "callback_1"); tester.start(); // ContextManager.getInstance().putContext( // servletHolder.getServlet().getServletConfig().getServletContext()); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("OK", response.getContent()); } /** * Test that we can invalidate and then recreate a new session */ @Test public void testInvalidateAndRecreateSession() throws Exception { Callback c_1 = (request, response) -> { PrintWriter out = response.getWriter(); out.write(request.getSession().getId()); }; Callback c_2 = (request, response) -> { HttpSession s = request.getSession(); s.invalidate(); PrintWriter out = response.getWriter(); out.write(request.getSession().getId()); }; tester.setAttribute("callback_1", c_1); 
tester.setAttribute("callback_2", c_2); ServletHolder sh = tester.addServlet(BasicServlet.class, "/dispatch"); sh.setInitParameter("test.callback", "callback_2"); tester.start(); // ContextManager.getInstance().putContext( // sh.getServlet().getServletConfig().getServletContext()); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); String session1 = response.getContent(); request.setHeader("Cookie", "JSESSIONID=" + session1); request.setURI("/test/request2"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); String session12 = response.getContent(); assertFalse("First and subsequent session ids must not be the same", session1.equals(session12)); } /** * Test that creation time does not change on subsequent access */ @Test public void testGetCreationTime() throws Exception { Callback c = (request, response) -> { HttpSession session = request.getSession(); PrintWriter out = response.getWriter(); out.write(Long.toString(session.getCreationTime())); }; tester.setAttribute("callback_1", c); tester.start(); // ContextManager.getInstance().putContext( // servletHolder.getServlet().getServletConfig().getServletContext()); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); long time1 = Long.parseLong(response.getContent()); assertTrue("Creation time should be positive", time1 > 0); List<Cookie> cookies = getCookies(response); request.setHeader("Cookie", "JSESSIONID=" + getSessionCookie(cookies).getValue()); try { Thread.sleep(1000); } catch (Exception ignored) { } response = HttpTester.parseResponse(tester.getResponses(request.generate())); long time2 = Long.parseLong(response.getContent()); assertTrue("Creation time should be the same across 
requests", time1 == time2); } /** * Test that the last accessed time is updated on subsequent access */ @Test public void testGetLastAccessedTime() throws Exception { Callback c = (request, response) -> { HttpSession session = request.getSession(); PrintWriter out = response.getWriter(); out.write(Long.toString(session.getLastAccessedTime())); }; tester.setAttribute("callback_1", c); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); long time1 = Long.parseLong(response.getContent()); List<Cookie> cookies = getCookies(response); request.setHeader("Cookie", "JSESSIONID=" + getSessionCookie(cookies).getValue()); Thread.sleep(1000); response = HttpTester.parseResponse(tester.getResponses(request.generate())); long time2 = Long.parseLong(response.getContent()); assertTrue("Last accessed time should be increasing across requests", time2 > time1); } /** * Test session id embedded in the URL */ @Test public void testSessionIdEmbeddedInUrl() throws Exception { Callback c = (request, response) -> { GemfireHttpSession session = (GemfireHttpSession) request.getSession(); PrintWriter out = response.getWriter(); out.write(session.getId()); }; tester.setAttribute("callback_1", c); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); List<Cookie> cookies = getCookies(response); String sessionId = response.getContent(); assertEquals("Session ids should be the same", sessionId, getSessionCookie(cookies).getValue()); request.setURI("/test/hello;jsessionid=" + sessionId); response = HttpTester.parseResponse(tester.getResponses(request.generate())); cookies = getCookies(response); assertEquals("Session ids should be the same", sessionId, 
getSessionCookie(cookies).getValue()); } /** * Test that request forward dispatching works */ @Test public void testDispatchingForward1() throws Exception { Callback c_1 = (request, response) -> { RequestDispatcher dispatcher = request.getRequestDispatcher("dispatch"); dispatcher.forward(request, response); // This should not appear in the output PrintWriter out = response.getWriter(); out.write("bang"); }; // This is the callback used by the forward servlet Callback c_2 = (request, response) -> { PrintWriter out = response.getWriter(); out.write("dispatched"); }; tester.setAttribute("callback_1", c_1); tester.setAttribute("callback_2", c_2); ServletHolder sh = tester.addServlet(BasicServlet.class, "/dispatch"); sh.setInitParameter("test.callback", "callback_2"); tester.start(); // ContextManager.getInstance().putContext( // sh.getServlet().getServletConfig().getServletContext()); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("dispatched", response.getContent()); } /** * Test that request include dispatching works */ @Test public void testDispatchingInclude() throws Exception { Callback c_1 = (request, response) -> { RequestDispatcher dispatcher = request.getRequestDispatcher("dispatch"); dispatcher.include(request, response); // This *should* appear in the output PrintWriter out = response.getWriter(); out.write("_bang"); }; // This is the callback used by the include servlet Callback c_2 = (request, response) -> { PrintWriter out = response.getWriter(); out.write("dispatched"); }; tester.setAttribute("callback_1", c_1); tester.setAttribute("callback_2", c_2); ServletHolder sh = tester.addServlet(BasicServlet.class, "/dispatch"); sh.setInitParameter("test.callback", "callback_2"); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); 
request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals("dispatched_bang", response.getContent()); } // @Test public void testJsp() throws Exception { tester.setResourceBase("target/test-classes"); ServletHolder jspHolder = tester.addServlet(JspServlet.class, "/test/*"); jspHolder.setInitOrder(1); jspHolder.setInitParameter("scratchdir", tmpdir.toString()); Callback c_1 = (request, response) -> { request.getSession().setAttribute("foo", "bar"); request.setAttribute("foo", "baz"); RequestDispatcher dispatcher = request.getRequestDispatcher("pagecontext.jsp"); dispatcher.forward(request, response); }; tester.getContext().setClassLoader(Thread.currentThread().getContextClassLoader()); tester.setAttribute("callback_1", c_1); tester.start(); request.setMethod("GET"); request.setURI("/test/hello"); request.setHeader("Host", "tester"); request.setVersion("HTTP/1.0"); response = HttpTester.parseResponse(tester.getResponses(request.generate())); assertEquals(200, response.getStatus()); assertEquals("baz", response.getContent().trim()); } //////////////////////////////////////////////////////////////////// // Private methods /** * Why doesn't HttpTester do this already?? 
*/ private List<Cookie> getCookies(HttpTester.Response response) { List<Cookie> cookies = new ArrayList<>(); Enumeration e = response.getValues("Set-Cookie"); while (e != null && e.hasMoreElements()) { String header = (String) e.nextElement(); Cookie c = null; StringTokenizer st = new StringTokenizer(header, ";"); while (st.hasMoreTokens()) { String[] split = st.nextToken().split("="); String param = split[0].trim(); String value = null; if (split.length > 1) { value = split[1].trim(); } if ("version".equalsIgnoreCase(param)) { c.setVersion(Integer.parseInt(value)); } else if ("comment".equalsIgnoreCase(param)) { c.setComment(value); } else if ("domain".equalsIgnoreCase(param)) { c.setDomain(value); } else if ("max-age".equalsIgnoreCase(param)) { c.setMaxAge(Integer.parseInt(value)); } else if ("discard".equalsIgnoreCase(param)) { c.setMaxAge(-1); } else if ("path".equalsIgnoreCase(param)) { c.setPath(value); } else if ("secure".equalsIgnoreCase(param)) { c.setSecure(true); } else if ("httponly".equalsIgnoreCase(param)) { // Ignored?? } else { if (c == null) { c = new Cookie(param, value); } else { throw new IllegalStateException("Unknown cookie param: " + param); } } } if (c != null) { cookies.add(c); } } return cookies; } private Region getRegion() { // Yuck... return ((GemfireSessionManager) SessionCachingFilter.getSessionManager()).getCache().getCache() .getRegion("gemfire_modules_sessions"); } }
googleapis/google-cloud-java
35,975
java-datacatalog/proto-google-cloud-datacatalog-v1beta1/src/main/java/com/google/cloud/datacatalog/v1beta1/UpdateEntryGroupRequest.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ // Generated by the protocol buffer compiler. DO NOT EDIT! // source: google/cloud/datacatalog/v1beta1/datacatalog.proto // Protobuf Java Version: 3.25.8 package com.google.cloud.datacatalog.v1beta1; /** * * * <pre> * Request message for * [UpdateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntryGroup]. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest} */ public final class UpdateEntryGroupRequest extends com.google.protobuf.GeneratedMessageV3 implements // @@protoc_insertion_point(message_implements:google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest) UpdateEntryGroupRequestOrBuilder { private static final long serialVersionUID = 0L; // Use UpdateEntryGroupRequest.newBuilder() to construct. 
private UpdateEntryGroupRequest(com.google.protobuf.GeneratedMessageV3.Builder<?> builder) { super(builder); } private UpdateEntryGroupRequest() {} @java.lang.Override @SuppressWarnings({"unused"}) protected java.lang.Object newInstance(UnusedPrivateParameter unused) { return new UpdateEntryGroupRequest(); } public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_UpdateEntryGroupRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_UpdateEntryGroupRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest.class, com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest.Builder.class); } private int bitField0_; public static final int ENTRY_GROUP_FIELD_NUMBER = 1; private com.google.cloud.datacatalog.v1beta1.EntryGroup entryGroup_; /** * * * <pre> * Required. The updated entry group. "name" field must be set. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the entryGroup field is set. */ @java.lang.Override public boolean hasEntryGroup() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The updated entry group. "name" field must be set. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The entryGroup. */ @java.lang.Override public com.google.cloud.datacatalog.v1beta1.EntryGroup getEntryGroup() { return entryGroup_ == null ? com.google.cloud.datacatalog.v1beta1.EntryGroup.getDefaultInstance() : entryGroup_; } /** * * * <pre> * Required. 
The updated entry group. "name" field must be set. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ @java.lang.Override public com.google.cloud.datacatalog.v1beta1.EntryGroupOrBuilder getEntryGroupOrBuilder() { return entryGroup_ == null ? com.google.cloud.datacatalog.v1beta1.EntryGroup.getDefaultInstance() : entryGroup_; } public static final int UPDATE_MASK_FIELD_NUMBER = 2; private com.google.protobuf.FieldMask updateMask_; /** * * * <pre> * Names of fields whose values to overwrite on an entry group. * * If this parameter is absent or empty, all modifiable fields * are overwritten. If such fields are non-required and omitted in the * request body, their values are emptied. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ @java.lang.Override public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Names of fields whose values to overwrite on an entry group. * * If this parameter is absent or empty, all modifiable fields * are overwritten. If such fields are non-required and omitted in the * request body, their values are emptied. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ @java.lang.Override public com.google.protobuf.FieldMask getUpdateMask() { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } /** * * * <pre> * Names of fields whose values to overwrite on an entry group. * * If this parameter is absent or empty, all modifiable fields * are overwritten. If such fields are non-required and omitted in the * request body, their values are emptied. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ @java.lang.Override public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } private byte memoizedIsInitialized = -1; @java.lang.Override public final boolean isInitialized() { byte isInitialized = memoizedIsInitialized; if (isInitialized == 1) return true; if (isInitialized == 0) return false; memoizedIsInitialized = 1; return true; } @java.lang.Override public void writeTo(com.google.protobuf.CodedOutputStream output) throws java.io.IOException { if (((bitField0_ & 0x00000001) != 0)) { output.writeMessage(1, getEntryGroup()); } if (((bitField0_ & 0x00000002) != 0)) { output.writeMessage(2, getUpdateMask()); } getUnknownFields().writeTo(output); } @java.lang.Override public int getSerializedSize() { int size = memoizedSize; if (size != -1) return size; size = 0; if (((bitField0_ & 0x00000001) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(1, getEntryGroup()); } if (((bitField0_ & 0x00000002) != 0)) { size += com.google.protobuf.CodedOutputStream.computeMessageSize(2, getUpdateMask()); } size += getUnknownFields().getSerializedSize(); memoizedSize = size; return size; } @java.lang.Override public boolean equals(final java.lang.Object obj) { if (obj == this) { return true; } if (!(obj instanceof com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest)) { return super.equals(obj); } com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest other = (com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest) obj; if (hasEntryGroup() != other.hasEntryGroup()) return false; if (hasEntryGroup()) { if (!getEntryGroup().equals(other.getEntryGroup())) return false; } if (hasUpdateMask() != other.hasUpdateMask()) return false; if (hasUpdateMask()) { if (!getUpdateMask().equals(other.getUpdateMask())) return false; } if (!getUnknownFields().equals(other.getUnknownFields())) return false; return true; } @java.lang.Override public int hashCode() { if (memoizedHashCode != 0) { return memoizedHashCode; } int hash = 41; hash = (19 * hash) + 
getDescriptor().hashCode(); if (hasEntryGroup()) { hash = (37 * hash) + ENTRY_GROUP_FIELD_NUMBER; hash = (53 * hash) + getEntryGroup().hashCode(); } if (hasUpdateMask()) { hash = (37 * hash) + UPDATE_MASK_FIELD_NUMBER; hash = (53 * hash) + getUpdateMask().hashCode(); } hash = (29 * hash) + getUnknownFields().hashCode(); memoizedHashCode = hash; return hash; } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest parseFrom( java.nio.ByteBuffer data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest parseFrom( java.nio.ByteBuffer data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest parseFrom( com.google.protobuf.ByteString data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest parseFrom( com.google.protobuf.ByteString data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest parseFrom(byte[] data) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data); } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest parseFrom( byte[] data, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { return PARSER.parseFrom(data, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest parseFrom( java.io.InputStream input) throws java.io.IOException { return 
com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest parseFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest parseDelimitedFrom( java.io.InputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest parseDelimitedFrom( java.io.InputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseDelimitedWithIOException( PARSER, input, extensionRegistry); } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest parseFrom( com.google.protobuf.CodedInputStream input) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException(PARSER, input); } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest parseFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { return com.google.protobuf.GeneratedMessageV3.parseWithIOException( PARSER, input, extensionRegistry); } @java.lang.Override public Builder newBuilderForType() { return newBuilder(); } public static Builder newBuilder() { return DEFAULT_INSTANCE.toBuilder(); } public static Builder newBuilder( com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest prototype) { return DEFAULT_INSTANCE.toBuilder().mergeFrom(prototype); } @java.lang.Override public Builder toBuilder() { return this == DEFAULT_INSTANCE ? 
new Builder() : new Builder().mergeFrom(this); } @java.lang.Override protected Builder newBuilderForType(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { Builder builder = new Builder(parent); return builder; } /** * * * <pre> * Request message for * [UpdateEntryGroup][google.cloud.datacatalog.v1beta1.DataCatalog.UpdateEntryGroup]. * </pre> * * Protobuf type {@code google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest} */ public static final class Builder extends com.google.protobuf.GeneratedMessageV3.Builder<Builder> implements // @@protoc_insertion_point(builder_implements:google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest) com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequestOrBuilder { public static final com.google.protobuf.Descriptors.Descriptor getDescriptor() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_UpdateEntryGroupRequest_descriptor; } @java.lang.Override protected com.google.protobuf.GeneratedMessageV3.FieldAccessorTable internalGetFieldAccessorTable() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_UpdateEntryGroupRequest_fieldAccessorTable .ensureFieldAccessorsInitialized( com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest.class, com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest.Builder.class); } // Construct using com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest.newBuilder() private Builder() { maybeForceBuilderInitialization(); } private Builder(com.google.protobuf.GeneratedMessageV3.BuilderParent parent) { super(parent); maybeForceBuilderInitialization(); } private void maybeForceBuilderInitialization() { if (com.google.protobuf.GeneratedMessageV3.alwaysUseFieldBuilders) { getEntryGroupFieldBuilder(); getUpdateMaskFieldBuilder(); } } @java.lang.Override public Builder clear() { super.clear(); bitField0_ = 0; entryGroup_ = null; if (entryGroupBuilder_ != null) { 
entryGroupBuilder_.dispose(); entryGroupBuilder_ = null; } updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } return this; } @java.lang.Override public com.google.protobuf.Descriptors.Descriptor getDescriptorForType() { return com.google.cloud.datacatalog.v1beta1.Datacatalog .internal_static_google_cloud_datacatalog_v1beta1_UpdateEntryGroupRequest_descriptor; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest getDefaultInstanceForType() { return com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest.getDefaultInstance(); } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest build() { com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest result = buildPartial(); if (!result.isInitialized()) { throw newUninitializedMessageException(result); } return result; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest buildPartial() { com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest result = new com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest(this); if (bitField0_ != 0) { buildPartial0(result); } onBuilt(); return result; } private void buildPartial0( com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest result) { int from_bitField0_ = bitField0_; int to_bitField0_ = 0; if (((from_bitField0_ & 0x00000001) != 0)) { result.entryGroup_ = entryGroupBuilder_ == null ? entryGroup_ : entryGroupBuilder_.build(); to_bitField0_ |= 0x00000001; } if (((from_bitField0_ & 0x00000002) != 0)) { result.updateMask_ = updateMaskBuilder_ == null ? 
updateMask_ : updateMaskBuilder_.build(); to_bitField0_ |= 0x00000002; } result.bitField0_ |= to_bitField0_; } @java.lang.Override public Builder clone() { return super.clone(); } @java.lang.Override public Builder setField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.setField(field, value); } @java.lang.Override public Builder clearField(com.google.protobuf.Descriptors.FieldDescriptor field) { return super.clearField(field); } @java.lang.Override public Builder clearOneof(com.google.protobuf.Descriptors.OneofDescriptor oneof) { return super.clearOneof(oneof); } @java.lang.Override public Builder setRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, int index, java.lang.Object value) { return super.setRepeatedField(field, index, value); } @java.lang.Override public Builder addRepeatedField( com.google.protobuf.Descriptors.FieldDescriptor field, java.lang.Object value) { return super.addRepeatedField(field, value); } @java.lang.Override public Builder mergeFrom(com.google.protobuf.Message other) { if (other instanceof com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest) { return mergeFrom((com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest) other); } else { super.mergeFrom(other); return this; } } public Builder mergeFrom(com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest other) { if (other == com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest.getDefaultInstance()) return this; if (other.hasEntryGroup()) { mergeEntryGroup(other.getEntryGroup()); } if (other.hasUpdateMask()) { mergeUpdateMask(other.getUpdateMask()); } this.mergeUnknownFields(other.getUnknownFields()); onChanged(); return this; } @java.lang.Override public final boolean isInitialized() { return true; } @java.lang.Override public Builder mergeFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws java.io.IOException { if 
(extensionRegistry == null) { throw new java.lang.NullPointerException(); } try { boolean done = false; while (!done) { int tag = input.readTag(); switch (tag) { case 0: done = true; break; case 10: { input.readMessage(getEntryGroupFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000001; break; } // case 10 case 18: { input.readMessage(getUpdateMaskFieldBuilder().getBuilder(), extensionRegistry); bitField0_ |= 0x00000002; break; } // case 18 default: { if (!super.parseUnknownField(input, extensionRegistry, tag)) { done = true; // was an endgroup tag } break; } // default: } // switch (tag) } // while (!done) } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.unwrapIOException(); } finally { onChanged(); } // finally return this; } private int bitField0_; private com.google.cloud.datacatalog.v1beta1.EntryGroup entryGroup_; private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datacatalog.v1beta1.EntryGroup, com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder, com.google.cloud.datacatalog.v1beta1.EntryGroupOrBuilder> entryGroupBuilder_; /** * * * <pre> * Required. The updated entry group. "name" field must be set. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return Whether the entryGroup field is set. */ public boolean hasEntryGroup() { return ((bitField0_ & 0x00000001) != 0); } /** * * * <pre> * Required. The updated entry group. "name" field must be set. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> * * @return The entryGroup. */ public com.google.cloud.datacatalog.v1beta1.EntryGroup getEntryGroup() { if (entryGroupBuilder_ == null) { return entryGroup_ == null ? com.google.cloud.datacatalog.v1beta1.EntryGroup.getDefaultInstance() : entryGroup_; } else { return entryGroupBuilder_.getMessage(); } } /** * * * <pre> * Required. 
The updated entry group. "name" field must be set. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEntryGroup(com.google.cloud.datacatalog.v1beta1.EntryGroup value) { if (entryGroupBuilder_ == null) { if (value == null) { throw new NullPointerException(); } entryGroup_ = value; } else { entryGroupBuilder_.setMessage(value); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The updated entry group. "name" field must be set. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder setEntryGroup( com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder builderForValue) { if (entryGroupBuilder_ == null) { entryGroup_ = builderForValue.build(); } else { entryGroupBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000001; onChanged(); return this; } /** * * * <pre> * Required. The updated entry group. "name" field must be set. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder mergeEntryGroup(com.google.cloud.datacatalog.v1beta1.EntryGroup value) { if (entryGroupBuilder_ == null) { if (((bitField0_ & 0x00000001) != 0) && entryGroup_ != null && entryGroup_ != com.google.cloud.datacatalog.v1beta1.EntryGroup.getDefaultInstance()) { getEntryGroupBuilder().mergeFrom(value); } else { entryGroup_ = value; } } else { entryGroupBuilder_.mergeFrom(value); } if (entryGroup_ != null) { bitField0_ |= 0x00000001; onChanged(); } return this; } /** * * * <pre> * Required. The updated entry group. "name" field must be set. 
* </pre> * * <code> * .google.cloud.datacatalog.v1beta1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public Builder clearEntryGroup() { bitField0_ = (bitField0_ & ~0x00000001); entryGroup_ = null; if (entryGroupBuilder_ != null) { entryGroupBuilder_.dispose(); entryGroupBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Required. The updated entry group. "name" field must be set. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder getEntryGroupBuilder() { bitField0_ |= 0x00000001; onChanged(); return getEntryGroupFieldBuilder().getBuilder(); } /** * * * <pre> * Required. The updated entry group. "name" field must be set. * </pre> * * <code> * .google.cloud.datacatalog.v1beta1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ public com.google.cloud.datacatalog.v1beta1.EntryGroupOrBuilder getEntryGroupOrBuilder() { if (entryGroupBuilder_ != null) { return entryGroupBuilder_.getMessageOrBuilder(); } else { return entryGroup_ == null ? com.google.cloud.datacatalog.v1beta1.EntryGroup.getDefaultInstance() : entryGroup_; } } /** * * * <pre> * Required. The updated entry group. "name" field must be set. 
* </pre> * * <code> * .google.cloud.datacatalog.v1beta1.EntryGroup entry_group = 1 [(.google.api.field_behavior) = REQUIRED]; * </code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datacatalog.v1beta1.EntryGroup, com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder, com.google.cloud.datacatalog.v1beta1.EntryGroupOrBuilder> getEntryGroupFieldBuilder() { if (entryGroupBuilder_ == null) { entryGroupBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.cloud.datacatalog.v1beta1.EntryGroup, com.google.cloud.datacatalog.v1beta1.EntryGroup.Builder, com.google.cloud.datacatalog.v1beta1.EntryGroupOrBuilder>( getEntryGroup(), getParentForChildren(), isClean()); entryGroup_ = null; } return entryGroupBuilder_; } private com.google.protobuf.FieldMask updateMask_; private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> updateMaskBuilder_; /** * * * <pre> * Names of fields whose values to overwrite on an entry group. * * If this parameter is absent or empty, all modifiable fields * are overwritten. If such fields are non-required and omitted in the * request body, their values are emptied. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return Whether the updateMask field is set. */ public boolean hasUpdateMask() { return ((bitField0_ & 0x00000002) != 0); } /** * * * <pre> * Names of fields whose values to overwrite on an entry group. * * If this parameter is absent or empty, all modifiable fields * are overwritten. If such fields are non-required and omitted in the * request body, their values are emptied. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> * * @return The updateMask. */ public com.google.protobuf.FieldMask getUpdateMask() { if (updateMaskBuilder_ == null) { return updateMask_ == null ? 
com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } else { return updateMaskBuilder_.getMessage(); } } /** * * * <pre> * Names of fields whose values to overwrite on an entry group. * * If this parameter is absent or empty, all modifiable fields * are overwritten. If such fields are non-required and omitted in the * request body, their values are emptied. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (value == null) { throw new NullPointerException(); } updateMask_ = value; } else { updateMaskBuilder_.setMessage(value); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Names of fields whose values to overwrite on an entry group. * * If this parameter is absent or empty, all modifiable fields * are overwritten. If such fields are non-required and omitted in the * request body, their values are emptied. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder setUpdateMask(com.google.protobuf.FieldMask.Builder builderForValue) { if (updateMaskBuilder_ == null) { updateMask_ = builderForValue.build(); } else { updateMaskBuilder_.setMessage(builderForValue.build()); } bitField0_ |= 0x00000002; onChanged(); return this; } /** * * * <pre> * Names of fields whose values to overwrite on an entry group. * * If this parameter is absent or empty, all modifiable fields * are overwritten. If such fields are non-required and omitted in the * request body, their values are emptied. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder mergeUpdateMask(com.google.protobuf.FieldMask value) { if (updateMaskBuilder_ == null) { if (((bitField0_ & 0x00000002) != 0) && updateMask_ != null && updateMask_ != com.google.protobuf.FieldMask.getDefaultInstance()) { getUpdateMaskBuilder().mergeFrom(value); } else { updateMask_ = value; } } else { updateMaskBuilder_.mergeFrom(value); } if (updateMask_ != null) { bitField0_ |= 0x00000002; onChanged(); } return this; } /** * * * <pre> * Names of fields whose values to overwrite on an entry group. * * If this parameter is absent or empty, all modifiable fields * are overwritten. If such fields are non-required and omitted in the * request body, their values are emptied. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public Builder clearUpdateMask() { bitField0_ = (bitField0_ & ~0x00000002); updateMask_ = null; if (updateMaskBuilder_ != null) { updateMaskBuilder_.dispose(); updateMaskBuilder_ = null; } onChanged(); return this; } /** * * * <pre> * Names of fields whose values to overwrite on an entry group. * * If this parameter is absent or empty, all modifiable fields * are overwritten. If such fields are non-required and omitted in the * request body, their values are emptied. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMask.Builder getUpdateMaskBuilder() { bitField0_ |= 0x00000002; onChanged(); return getUpdateMaskFieldBuilder().getBuilder(); } /** * * * <pre> * Names of fields whose values to overwrite on an entry group. * * If this parameter is absent or empty, all modifiable fields * are overwritten. If such fields are non-required and omitted in the * request body, their values are emptied. 
* </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ public com.google.protobuf.FieldMaskOrBuilder getUpdateMaskOrBuilder() { if (updateMaskBuilder_ != null) { return updateMaskBuilder_.getMessageOrBuilder(); } else { return updateMask_ == null ? com.google.protobuf.FieldMask.getDefaultInstance() : updateMask_; } } /** * * * <pre> * Names of fields whose values to overwrite on an entry group. * * If this parameter is absent or empty, all modifiable fields * are overwritten. If such fields are non-required and omitted in the * request body, their values are emptied. * </pre> * * <code>.google.protobuf.FieldMask update_mask = 2;</code> */ private com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder> getUpdateMaskFieldBuilder() { if (updateMaskBuilder_ == null) { updateMaskBuilder_ = new com.google.protobuf.SingleFieldBuilderV3< com.google.protobuf.FieldMask, com.google.protobuf.FieldMask.Builder, com.google.protobuf.FieldMaskOrBuilder>( getUpdateMask(), getParentForChildren(), isClean()); updateMask_ = null; } return updateMaskBuilder_; } @java.lang.Override public final Builder setUnknownFields(final com.google.protobuf.UnknownFieldSet unknownFields) { return super.setUnknownFields(unknownFields); } @java.lang.Override public final Builder mergeUnknownFields( final com.google.protobuf.UnknownFieldSet unknownFields) { return super.mergeUnknownFields(unknownFields); } // @@protoc_insertion_point(builder_scope:google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest) } // @@protoc_insertion_point(class_scope:google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest) private static final com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest DEFAULT_INSTANCE; static { DEFAULT_INSTANCE = new com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest(); } public static com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest getDefaultInstance() 
{ return DEFAULT_INSTANCE; } private static final com.google.protobuf.Parser<UpdateEntryGroupRequest> PARSER = new com.google.protobuf.AbstractParser<UpdateEntryGroupRequest>() { @java.lang.Override public UpdateEntryGroupRequest parsePartialFrom( com.google.protobuf.CodedInputStream input, com.google.protobuf.ExtensionRegistryLite extensionRegistry) throws com.google.protobuf.InvalidProtocolBufferException { Builder builder = newBuilder(); try { builder.mergeFrom(input, extensionRegistry); } catch (com.google.protobuf.InvalidProtocolBufferException e) { throw e.setUnfinishedMessage(builder.buildPartial()); } catch (com.google.protobuf.UninitializedMessageException e) { throw e.asInvalidProtocolBufferException().setUnfinishedMessage(builder.buildPartial()); } catch (java.io.IOException e) { throw new com.google.protobuf.InvalidProtocolBufferException(e) .setUnfinishedMessage(builder.buildPartial()); } return builder.buildPartial(); } }; public static com.google.protobuf.Parser<UpdateEntryGroupRequest> parser() { return PARSER; } @java.lang.Override public com.google.protobuf.Parser<UpdateEntryGroupRequest> getParserForType() { return PARSER; } @java.lang.Override public com.google.cloud.datacatalog.v1beta1.UpdateEntryGroupRequest getDefaultInstanceForType() { return DEFAULT_INSTANCE; } }
googleapis/google-cloud-java
36,158
java-shopping-merchant-accounts/grpc-google-shopping-merchant-accounts-v1/src/main/java/com/google/shopping/merchant/accounts/v1/CheckoutSettingsServiceGrpc.java
/* * Copyright 2025 Google LLC * * Licensed under the Apache License, Version 2.0 (the "License"); * you may not use this file except in compliance with the License. * You may obtain a copy of the License at * * https://www.apache.org/licenses/LICENSE-2.0 * * Unless required by applicable law or agreed to in writing, software * distributed under the License is distributed on an "AS IS" BASIS, * WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. * See the License for the specific language governing permissions and * limitations under the License. */ package com.google.shopping.merchant.accounts.v1; import static io.grpc.MethodDescriptor.generateFullMethodName; /** * * * <pre> * Service for supporting [checkout * settings](https://support.google.com/merchants/answer/13945960). * </pre> */ @javax.annotation.Generated( value = "by gRPC proto compiler", comments = "Source: google/shopping/merchant/accounts/v1/checkoutsettings.proto") @io.grpc.stub.annotations.GrpcGenerated public final class CheckoutSettingsServiceGrpc { private CheckoutSettingsServiceGrpc() {} public static final java.lang.String SERVICE_NAME = "google.shopping.merchant.accounts.v1.CheckoutSettingsService"; // Static method descriptors that strictly reflect the proto. 
private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest, com.google.shopping.merchant.accounts.v1.CheckoutSettings> getGetCheckoutSettingsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "GetCheckoutSettings", requestType = com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest.class, responseType = com.google.shopping.merchant.accounts.v1.CheckoutSettings.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest, com.google.shopping.merchant.accounts.v1.CheckoutSettings> getGetCheckoutSettingsMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest, com.google.shopping.merchant.accounts.v1.CheckoutSettings> getGetCheckoutSettingsMethod; if ((getGetCheckoutSettingsMethod = CheckoutSettingsServiceGrpc.getGetCheckoutSettingsMethod) == null) { synchronized (CheckoutSettingsServiceGrpc.class) { if ((getGetCheckoutSettingsMethod = CheckoutSettingsServiceGrpc.getGetCheckoutSettingsMethod) == null) { CheckoutSettingsServiceGrpc.getGetCheckoutSettingsMethod = getGetCheckoutSettingsMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest, com.google.shopping.merchant.accounts.v1.CheckoutSettings> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName( generateFullMethodName(SERVICE_NAME, "GetCheckoutSettings")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.CheckoutSettings .getDefaultInstance())) .setSchemaDescriptor( new CheckoutSettingsServiceMethodDescriptorSupplier( "GetCheckoutSettings")) 
.build(); } } } return getGetCheckoutSettingsMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest, com.google.shopping.merchant.accounts.v1.CheckoutSettings> getCreateCheckoutSettingsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "CreateCheckoutSettings", requestType = com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest.class, responseType = com.google.shopping.merchant.accounts.v1.CheckoutSettings.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest, com.google.shopping.merchant.accounts.v1.CheckoutSettings> getCreateCheckoutSettingsMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest, com.google.shopping.merchant.accounts.v1.CheckoutSettings> getCreateCheckoutSettingsMethod; if ((getCreateCheckoutSettingsMethod = CheckoutSettingsServiceGrpc.getCreateCheckoutSettingsMethod) == null) { synchronized (CheckoutSettingsServiceGrpc.class) { if ((getCreateCheckoutSettingsMethod = CheckoutSettingsServiceGrpc.getCreateCheckoutSettingsMethod) == null) { CheckoutSettingsServiceGrpc.getCreateCheckoutSettingsMethod = getCreateCheckoutSettingsMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest, com.google.shopping.merchant.accounts.v1.CheckoutSettings> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName( generateFullMethodName(SERVICE_NAME, "CreateCheckoutSettings")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest .getDefaultInstance())) .setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.CheckoutSettings 
.getDefaultInstance())) .setSchemaDescriptor( new CheckoutSettingsServiceMethodDescriptorSupplier( "CreateCheckoutSettings")) .build(); } } } return getCreateCheckoutSettingsMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest, com.google.shopping.merchant.accounts.v1.CheckoutSettings> getUpdateCheckoutSettingsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "UpdateCheckoutSettings", requestType = com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest.class, responseType = com.google.shopping.merchant.accounts.v1.CheckoutSettings.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest, com.google.shopping.merchant.accounts.v1.CheckoutSettings> getUpdateCheckoutSettingsMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest, com.google.shopping.merchant.accounts.v1.CheckoutSettings> getUpdateCheckoutSettingsMethod; if ((getUpdateCheckoutSettingsMethod = CheckoutSettingsServiceGrpc.getUpdateCheckoutSettingsMethod) == null) { synchronized (CheckoutSettingsServiceGrpc.class) { if ((getUpdateCheckoutSettingsMethod = CheckoutSettingsServiceGrpc.getUpdateCheckoutSettingsMethod) == null) { CheckoutSettingsServiceGrpc.getUpdateCheckoutSettingsMethod = getUpdateCheckoutSettingsMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest, com.google.shopping.merchant.accounts.v1.CheckoutSettings> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName( generateFullMethodName(SERVICE_NAME, "UpdateCheckoutSettings")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest .getDefaultInstance())) 
.setResponseMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.CheckoutSettings .getDefaultInstance())) .setSchemaDescriptor( new CheckoutSettingsServiceMethodDescriptorSupplier( "UpdateCheckoutSettings")) .build(); } } } return getUpdateCheckoutSettingsMethod; } private static volatile io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest, com.google.protobuf.Empty> getDeleteCheckoutSettingsMethod; @io.grpc.stub.annotations.RpcMethod( fullMethodName = SERVICE_NAME + '/' + "DeleteCheckoutSettings", requestType = com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest.class, responseType = com.google.protobuf.Empty.class, methodType = io.grpc.MethodDescriptor.MethodType.UNARY) public static io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest, com.google.protobuf.Empty> getDeleteCheckoutSettingsMethod() { io.grpc.MethodDescriptor< com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest, com.google.protobuf.Empty> getDeleteCheckoutSettingsMethod; if ((getDeleteCheckoutSettingsMethod = CheckoutSettingsServiceGrpc.getDeleteCheckoutSettingsMethod) == null) { synchronized (CheckoutSettingsServiceGrpc.class) { if ((getDeleteCheckoutSettingsMethod = CheckoutSettingsServiceGrpc.getDeleteCheckoutSettingsMethod) == null) { CheckoutSettingsServiceGrpc.getDeleteCheckoutSettingsMethod = getDeleteCheckoutSettingsMethod = io.grpc.MethodDescriptor .<com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest, com.google.protobuf.Empty> newBuilder() .setType(io.grpc.MethodDescriptor.MethodType.UNARY) .setFullMethodName( generateFullMethodName(SERVICE_NAME, "DeleteCheckoutSettings")) .setSampledToLocalTracing(true) .setRequestMarshaller( io.grpc.protobuf.ProtoUtils.marshaller( com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest .getDefaultInstance())) .setResponseMarshaller( 
// NOTE(review): auto-generated gRPC bindings (protoc grpc-java plugin output) for
// CheckoutSettingsService. Do not hand-edit; regenerate from the .proto definition instead.
// This chunk begins mid-way through getDeleteCheckoutSettingsMethod(), whose opening lines
// (lazy double-checked initialization of the MethodDescriptor) are outside this view.
                  io.grpc.protobuf.ProtoUtils.marshaller(
                      com.google.protobuf.Empty.getDefaultInstance()))
              .setSchemaDescriptor(
                  new CheckoutSettingsServiceMethodDescriptorSupplier("DeleteCheckoutSettings"))
              .build();
        }
      }
    }
    return getDeleteCheckoutSettingsMethod;
  }

  /** Creates a new async stub that supports all call types for the service */
  public static CheckoutSettingsServiceStub newStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<CheckoutSettingsServiceStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<CheckoutSettingsServiceStub>() {
          @java.lang.Override
          public CheckoutSettingsServiceStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new CheckoutSettingsServiceStub(channel, callOptions);
          }
        };
    return CheckoutSettingsServiceStub.newStub(factory, channel);
  }

  /** Creates a new blocking-style stub that supports all types of calls on the service */
  public static CheckoutSettingsServiceBlockingV2Stub newBlockingV2Stub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<CheckoutSettingsServiceBlockingV2Stub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<CheckoutSettingsServiceBlockingV2Stub>() {
          @java.lang.Override
          public CheckoutSettingsServiceBlockingV2Stub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new CheckoutSettingsServiceBlockingV2Stub(channel, callOptions);
          }
        };
    return CheckoutSettingsServiceBlockingV2Stub.newStub(factory, channel);
  }

  /**
   * Creates a new blocking-style stub that supports unary and streaming output calls on the service
   */
  public static CheckoutSettingsServiceBlockingStub newBlockingStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<CheckoutSettingsServiceBlockingStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<CheckoutSettingsServiceBlockingStub>() {
          @java.lang.Override
          public CheckoutSettingsServiceBlockingStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new CheckoutSettingsServiceBlockingStub(channel, callOptions);
          }
        };
    return CheckoutSettingsServiceBlockingStub.newStub(factory, channel);
  }

  /** Creates a new ListenableFuture-style stub that supports unary calls on the service */
  public static CheckoutSettingsServiceFutureStub newFutureStub(io.grpc.Channel channel) {
    io.grpc.stub.AbstractStub.StubFactory<CheckoutSettingsServiceFutureStub> factory =
        new io.grpc.stub.AbstractStub.StubFactory<CheckoutSettingsServiceFutureStub>() {
          @java.lang.Override
          public CheckoutSettingsServiceFutureStub newStub(
              io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
            return new CheckoutSettingsServiceFutureStub(channel, callOptions);
          }
        };
    return CheckoutSettingsServiceFutureStub.newStub(factory, channel);
  }

  /**
   *
   *
   * <pre>
   * Service for supporting [checkout
   * settings](https://support.google.com/merchants/answer/13945960).
   * </pre>
   */
  // NOTE(review): default methods respond UNIMPLEMENTED until overridden by a server impl.
  public interface AsyncService {

    /**
     *
     *
     * <pre>
     * Gets `CheckoutSettings` for the given merchant. This includes
     * information about review state, enrollment state and URL settings.
     * </pre>
     */
    default void getCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.CheckoutSettings>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getGetCheckoutSettingsMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Creates `CheckoutSettings` for the given merchant.
     * </pre>
     */
    default void createCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.CheckoutSettings>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getCreateCheckoutSettingsMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates `CheckoutSettings` for the given merchant.
     * </pre>
     */
    default void updateCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.CheckoutSettings>
            responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getUpdateCheckoutSettingsMethod(), responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes `CheckoutSettings` and unenrolls merchant from
     * `Checkout` program.
     * </pre>
     */
    default void deleteCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ServerCalls.asyncUnimplementedUnaryCall(
          getDeleteCheckoutSettingsMethod(), responseObserver);
    }
  }

  /**
   * Base class for the server implementation of the service CheckoutSettingsService.
   *
   * <pre>
   * Service for supporting [checkout
   * settings](https://support.google.com/merchants/answer/13945960).
   * </pre>
   */
  public abstract static class CheckoutSettingsServiceImplBase
      implements io.grpc.BindableService, AsyncService {

    @java.lang.Override
    public final io.grpc.ServerServiceDefinition bindService() {
      return CheckoutSettingsServiceGrpc.bindService(this);
    }
  }

  /**
   * A stub to allow clients to do asynchronous rpc calls to service CheckoutSettingsService.
   *
   * <pre>
   * Service for supporting [checkout
   * settings](https://support.google.com/merchants/answer/13945960).
   * </pre>
   */
  public static final class CheckoutSettingsServiceStub
      extends io.grpc.stub.AbstractAsyncStub<CheckoutSettingsServiceStub> {
    private CheckoutSettingsServiceStub(io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected CheckoutSettingsServiceStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new CheckoutSettingsServiceStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Gets `CheckoutSettings` for the given merchant. This includes
     * information about review state, enrollment state and URL settings.
     * </pre>
     */
    public void getCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.CheckoutSettings>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getGetCheckoutSettingsMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Creates `CheckoutSettings` for the given merchant.
     * </pre>
     */
    public void createCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.CheckoutSettings>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getCreateCheckoutSettingsMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Updates `CheckoutSettings` for the given merchant.
     * </pre>
     */
    public void updateCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest request,
        io.grpc.stub.StreamObserver<com.google.shopping.merchant.accounts.v1.CheckoutSettings>
            responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getUpdateCheckoutSettingsMethod(), getCallOptions()),
          request,
          responseObserver);
    }

    /**
     *
     *
     * <pre>
     * Deletes `CheckoutSettings` and unenrolls merchant from
     * `Checkout` program.
     * </pre>
     */
    public void deleteCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest request,
        io.grpc.stub.StreamObserver<com.google.protobuf.Empty> responseObserver) {
      io.grpc.stub.ClientCalls.asyncUnaryCall(
          getChannel().newCall(getDeleteCheckoutSettingsMethod(), getCallOptions()),
          request,
          responseObserver);
    }
  }

  /**
   * A stub to allow clients to do synchronous rpc calls to service CheckoutSettingsService.
   *
   * <pre>
   * Service for supporting [checkout
   * settings](https://support.google.com/merchants/answer/13945960).
   * </pre>
   */
  public static final class CheckoutSettingsServiceBlockingV2Stub
      extends io.grpc.stub.AbstractBlockingStub<CheckoutSettingsServiceBlockingV2Stub> {
    private CheckoutSettingsServiceBlockingV2Stub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected CheckoutSettingsServiceBlockingV2Stub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new CheckoutSettingsServiceBlockingV2Stub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Gets `CheckoutSettings` for the given merchant. This includes
     * information about review state, enrollment state and URL settings.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.CheckoutSettings getCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetCheckoutSettingsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Creates `CheckoutSettings` for the given merchant.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.CheckoutSettings createCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateCheckoutSettingsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates `CheckoutSettings` for the given merchant.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.CheckoutSettings updateCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateCheckoutSettingsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes `CheckoutSettings` and unenrolls merchant from
     * `Checkout` program.
     * </pre>
     */
    public com.google.protobuf.Empty deleteCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteCheckoutSettingsMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do limited synchronous rpc calls to service CheckoutSettingsService.
   *
   * <pre>
   * Service for supporting [checkout
   * settings](https://support.google.com/merchants/answer/13945960).
   * </pre>
   */
  public static final class CheckoutSettingsServiceBlockingStub
      extends io.grpc.stub.AbstractBlockingStub<CheckoutSettingsServiceBlockingStub> {
    private CheckoutSettingsServiceBlockingStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected CheckoutSettingsServiceBlockingStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new CheckoutSettingsServiceBlockingStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Gets `CheckoutSettings` for the given merchant. This includes
     * information about review state, enrollment state and URL settings.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.CheckoutSettings getCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getGetCheckoutSettingsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Creates `CheckoutSettings` for the given merchant.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.CheckoutSettings createCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getCreateCheckoutSettingsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Updates `CheckoutSettings` for the given merchant.
     * </pre>
     */
    public com.google.shopping.merchant.accounts.v1.CheckoutSettings updateCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getUpdateCheckoutSettingsMethod(), getCallOptions(), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes `CheckoutSettings` and unenrolls merchant from
     * `Checkout` program.
     * </pre>
     */
    public com.google.protobuf.Empty deleteCheckoutSettings(
        com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest request) {
      return io.grpc.stub.ClientCalls.blockingUnaryCall(
          getChannel(), getDeleteCheckoutSettingsMethod(), getCallOptions(), request);
    }
  }

  /**
   * A stub to allow clients to do ListenableFuture-style rpc calls to service
   * CheckoutSettingsService.
   *
   * <pre>
   * Service for supporting [checkout
   * settings](https://support.google.com/merchants/answer/13945960).
   * </pre>
   */
  public static final class CheckoutSettingsServiceFutureStub
      extends io.grpc.stub.AbstractFutureStub<CheckoutSettingsServiceFutureStub> {
    private CheckoutSettingsServiceFutureStub(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      super(channel, callOptions);
    }

    @java.lang.Override
    protected CheckoutSettingsServiceFutureStub build(
        io.grpc.Channel channel, io.grpc.CallOptions callOptions) {
      return new CheckoutSettingsServiceFutureStub(channel, callOptions);
    }

    /**
     *
     *
     * <pre>
     * Gets `CheckoutSettings` for the given merchant. This includes
     * information about review state, enrollment state and URL settings.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1.CheckoutSettings>
        getCheckoutSettings(
            com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getGetCheckoutSettingsMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Creates `CheckoutSettings` for the given merchant.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1.CheckoutSettings>
        createCheckoutSettings(
            com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getCreateCheckoutSettingsMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Updates `CheckoutSettings` for the given merchant.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<
            com.google.shopping.merchant.accounts.v1.CheckoutSettings>
        updateCheckoutSettings(
            com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getUpdateCheckoutSettingsMethod(), getCallOptions()), request);
    }

    /**
     *
     *
     * <pre>
     * Deletes `CheckoutSettings` and unenrolls merchant from
     * `Checkout` program.
     * </pre>
     */
    public com.google.common.util.concurrent.ListenableFuture<com.google.protobuf.Empty>
        deleteCheckoutSettings(
            com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest request) {
      return io.grpc.stub.ClientCalls.futureUnaryCall(
          getChannel().newCall(getDeleteCheckoutSettingsMethod(), getCallOptions()), request);
    }
  }

  // Dispatch ids used by MethodHandlers to route a server call to the matching AsyncService method.
  private static final int METHODID_GET_CHECKOUT_SETTINGS = 0;
  private static final int METHODID_CREATE_CHECKOUT_SETTINGS = 1;
  private static final int METHODID_UPDATE_CHECKOUT_SETTINGS = 2;
  private static final int METHODID_DELETE_CHECKOUT_SETTINGS = 3;

  // Single handler class shared by all methods of the service; methodId selects the target RPC.
  private static final class MethodHandlers<Req, Resp>
      implements io.grpc.stub.ServerCalls.UnaryMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ServerStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.ClientStreamingMethod<Req, Resp>,
          io.grpc.stub.ServerCalls.BidiStreamingMethod<Req, Resp> {
    private final AsyncService serviceImpl;
    private final int methodId;

    MethodHandlers(AsyncService serviceImpl, int methodId) {
      this.serviceImpl = serviceImpl;
      this.methodId = methodId;
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public void invoke(Req request, io.grpc.stub.StreamObserver<Resp> responseObserver) {
      switch (methodId) {
        case METHODID_GET_CHECKOUT_SETTINGS:
          serviceImpl.getCheckoutSettings(
              (com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.shopping.merchant.accounts.v1.CheckoutSettings>)
                  responseObserver);
          break;
        case METHODID_CREATE_CHECKOUT_SETTINGS:
          serviceImpl.createCheckoutSettings(
              (com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.shopping.merchant.accounts.v1.CheckoutSettings>)
                  responseObserver);
          break;
        case METHODID_UPDATE_CHECKOUT_SETTINGS:
          serviceImpl.updateCheckoutSettings(
              (com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest) request,
              (io.grpc.stub.StreamObserver<
                      com.google.shopping.merchant.accounts.v1.CheckoutSettings>)
                  responseObserver);
          break;
        case METHODID_DELETE_CHECKOUT_SETTINGS:
          serviceImpl.deleteCheckoutSettings(
              (com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest) request,
              (io.grpc.stub.StreamObserver<com.google.protobuf.Empty>) responseObserver);
          break;
        default:
          throw new AssertionError();
      }
    }

    @java.lang.Override
    @java.lang.SuppressWarnings("unchecked")
    public io.grpc.stub.StreamObserver<Req> invoke(
        io.grpc.stub.StreamObserver<Resp> responseObserver) {
      // All four RPCs are unary; no streaming entry points exist, so any call here is a bug.
      switch (methodId) {
        default:
          throw new AssertionError();
      }
    }
  }

  public static final io.grpc.ServerServiceDefinition bindService(AsyncService service) {
    return io.grpc.ServerServiceDefinition.builder(getServiceDescriptor())
        .addMethod(
            getGetCheckoutSettingsMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1.GetCheckoutSettingsRequest,
                    com.google.shopping.merchant.accounts.v1.CheckoutSettings>(
                    service, METHODID_GET_CHECKOUT_SETTINGS)))
        .addMethod(
            getCreateCheckoutSettingsMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1.CreateCheckoutSettingsRequest,
                    com.google.shopping.merchant.accounts.v1.CheckoutSettings>(
                    service, METHODID_CREATE_CHECKOUT_SETTINGS)))
        .addMethod(
            getUpdateCheckoutSettingsMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1.UpdateCheckoutSettingsRequest,
                    com.google.shopping.merchant.accounts.v1.CheckoutSettings>(
                    service, METHODID_UPDATE_CHECKOUT_SETTINGS)))
        .addMethod(
            getDeleteCheckoutSettingsMethod(),
            io.grpc.stub.ServerCalls.asyncUnaryCall(
                new MethodHandlers<
                    com.google.shopping.merchant.accounts.v1.DeleteCheckoutSettingsRequest,
                    com.google.protobuf.Empty>(service, METHODID_DELETE_CHECKOUT_SETTINGS)))
        .build();
  }

  // Supplies proto descriptor metadata (for reflection/debugging) shared by the file-level and
  // per-method schema descriptor suppliers below.
  private abstract static class CheckoutSettingsServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoFileDescriptorSupplier,
          io.grpc.protobuf.ProtoServiceDescriptorSupplier {
    CheckoutSettingsServiceBaseDescriptorSupplier() {}

    @java.lang.Override
    public com.google.protobuf.Descriptors.FileDescriptor getFileDescriptor() {
      return com.google.shopping.merchant.accounts.v1.CheckoutsettingsProto.getDescriptor();
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.ServiceDescriptor getServiceDescriptor() {
      return getFileDescriptor().findServiceByName("CheckoutSettingsService");
    }
  }

  private static final class CheckoutSettingsServiceFileDescriptorSupplier
      extends CheckoutSettingsServiceBaseDescriptorSupplier {
    CheckoutSettingsServiceFileDescriptorSupplier() {}
  }

  private static final class CheckoutSettingsServiceMethodDescriptorSupplier
      extends CheckoutSettingsServiceBaseDescriptorSupplier
      implements io.grpc.protobuf.ProtoMethodDescriptorSupplier {
    private final java.lang.String methodName;

    CheckoutSettingsServiceMethodDescriptorSupplier(java.lang.String methodName) {
      this.methodName = methodName;
    }

    @java.lang.Override
    public com.google.protobuf.Descriptors.MethodDescriptor getMethodDescriptor() {
      return getServiceDescriptor().findMethodByName(methodName);
    }
  }

  private static volatile io.grpc.ServiceDescriptor serviceDescriptor;

  public static io.grpc.ServiceDescriptor getServiceDescriptor() {
    // Standard generated double-checked locking on the volatile serviceDescriptor field:
    // build the descriptor lazily, exactly once, without locking on the fast path.
    io.grpc.ServiceDescriptor result = serviceDescriptor;
    if (result == null) {
      synchronized (CheckoutSettingsServiceGrpc.class) {
        result = serviceDescriptor;
        if (result == null) {
          serviceDescriptor =
              result =
                  io.grpc.ServiceDescriptor.newBuilder(SERVICE_NAME)
                      .setSchemaDescriptor(new CheckoutSettingsServiceFileDescriptorSupplier())
                      .addMethod(getGetCheckoutSettingsMethod())
                      .addMethod(getCreateCheckoutSettingsMethod())
                      .addMethod(getUpdateCheckoutSettingsMethod())
                      .addMethod(getDeleteCheckoutSettingsMethod())
                      .build();
        }
      }
    }
    return result;
  }
}